Dataset schema (column name, dtype, observed value range):

  repository_name              string   length 5 to 67
  func_path_in_repository      string   length 4 to 234
  func_name                    string   length 0 to 314
  whole_func_string            string   length 52 to 3.87M
  language                     string   6 distinct classes
  func_code_string             string   length 52 to 3.87M
  func_documentation_string    string   length 1 to 47.2k
  func_code_url                string   length 85 to 339
GiulioRossetti/dynetx
dynetx/classes/dyndigraph.py
DynDiGraph.number_of_interactions
def number_of_interactions(self, u=None, v=None, t=None):
    """Return the number of interactions between two nodes at time t.

    Parameters
    ----------
    u, v : nodes, optional (default=all interactions)
        If u and v are specified, return the number of interactions
        between u and v. Otherwise return the total number of all
        interactions.
    t : snapshot id (default=None)
        If None, the number of edges on the flattened graph is returned.

    Returns
    -------
    nedges : int
        The number of interactions in the graph. If nodes u and v are
        specified, return the number of interactions between those nodes.
        If a single node is specified, return None.

    See Also
    --------
    size

    Examples
    --------
    >>> G = dn.DynDiGraph()
    >>> G.add_path([0,1,2,3], t=0)
    >>> G.number_of_interactions()
    3
    >>> G.number_of_interactions(0,1, t=0)
    1
    >>> G.add_edge(3, 4, t=1)
    >>> G.number_of_interactions()
    4
    """
    if t is None:
        if u is None:
            return int(self.size())
        elif u is not None and v is not None:
            if v in self._succ[u]:
                return 1
            else:
                return 0
    else:
        if u is None:
            return int(self.size(t))
        elif u is not None and v is not None:
            if v in self._succ[u]:
                if self.__presence_test(u, v, t):
                    return 1
                else:
                    return 0
            else:
                # u->v never appears, not even on the flattened graph
                # (the original fell through and returned None here)
                return 0
python
https://github.com/GiulioRossetti/dynetx/blob/634e2b38f8950885aebfa079dad7d5e8d7563f1d/dynetx/classes/dyndigraph.py#L767-L816
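A minimal usage sketch for the method above, following its docstring (it assumes dynetx is installed and imported as dn; the last call relies on the fall-through fix noted in the code):

import dynetx as dn

G = dn.DynDiGraph()
G.add_path([0, 1, 2, 3], t=0)               # three directed interactions at snapshot 0
print(G.number_of_interactions())            # 3, counted on the flattened graph
print(G.number_of_interactions(0, 1, t=0))   # 1, the edge 0->1 is alive at t=0
print(G.number_of_interactions(0, 1, t=1))   # 0, the edge exists but not at t=1
print(G.number_of_interactions(0, 3, t=0))   # 0 with the fall-through fix above (None upstream)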
GiulioRossetti/dynetx
dynetx/classes/dyndigraph.py
DynDiGraph.has_interaction
def has_interaction(self, u, v, t=None):
    """Return True if the interaction (u,v) is in the graph at time t.

    Parameters
    ----------
    u, v : nodes
        Nodes can be, for example, strings or numbers.
        Nodes must be hashable (and not None) Python objects.
    t : snapshot id (default=None)
        If None, the presence of the interaction on the flattened graph
        is tested.

    Returns
    -------
    edge_ind : bool
        True if the interaction is in the graph, False otherwise.

    Examples
    --------
    >>> G = dn.DynDiGraph()
    >>> G.add_interaction(0,1, t=0)
    >>> G.add_interaction(1,2, t=0)
    >>> G.add_interaction(2,3, t=0)
    >>> G.has_interaction(0,1, t=0)
    True
    >>> G.has_interaction(0,1, t=1)
    False
    """
    try:
        if t is None:
            return v in self._succ[u]
        else:
            return v in self._succ[u] and self.__presence_test(u, v, t)
    except KeyError:
        return False
python
https://github.com/GiulioRossetti/dynetx/blob/634e2b38f8950885aebfa079dad7d5e8d7563f1d/dynetx/classes/dyndigraph.py#L818-L854
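A short presence-check sketch based on the docstring above (assuming dynetx is imported as dn):

import dynetx as dn

G = dn.DynDiGraph()
G.add_interaction(0, 1, t=0)
print(G.has_interaction(0, 1, t=0))  # True: present at snapshot 0
print(G.has_interaction(0, 1, t=1))  # False: absent at snapshot 1
print(G.has_interaction(0, 1))       # True: checked on the flattened graph
print(G.has_interaction(1, 0, t=0))  # False: direction matters in a DynDiGraph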
GiulioRossetti/dynetx
dynetx/classes/dyndigraph.py
DynDiGraph.has_successor
def has_successor(self, u, v, t=None):
    """Return True if node u has successor v at time t (optional).

    This is true if the graph contains the edge u->v at time t.

    Parameters
    ----------
    u, v : nodes
        Nodes can be, for example, strings or numbers.
        Nodes must be hashable (and not None) Python objects.
    t : snapshot id (default=None)
        If None, the presence of the interaction on the flattened graph
        is tested.
    """
    return self.has_interaction(u, v, t)
python
https://github.com/GiulioRossetti/dynetx/blob/634e2b38f8950885aebfa079dad7d5e8d7563f1d/dynetx/classes/dyndigraph.py#L856-L870
GiulioRossetti/dynetx
dynetx/classes/dyndigraph.py
DynDiGraph.has_predecessor
def has_predecessor(self, u, v, t=None):
    """Return True if node u has predecessor v at time t (optional).

    This is true if the graph contains the edge u<-v at time t.

    Parameters
    ----------
    u, v : nodes
        Nodes can be, for example, strings or numbers.
        Nodes must be hashable (and not None) Python objects.
    t : snapshot id (default=None)
        If None, the presence of the interaction on the flattened graph
        is tested.
    """
    return self.has_interaction(v, u, t)
python
https://github.com/GiulioRossetti/dynetx/blob/634e2b38f8950885aebfa079dad7d5e8d7563f1d/dynetx/classes/dyndigraph.py#L872-L886
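The two wrappers above differ only in edge orientation; a quick sketch (assuming dynetx is imported as dn):

import dynetx as dn

G = dn.DynDiGraph()
G.add_interaction(0, 1, t=0)         # directed edge 0 -> 1
print(G.has_successor(0, 1, t=0))    # True: 0 -> 1 exists at t=0
print(G.has_predecessor(1, 0, t=0))  # True: the same edge, seen from node 1
print(G.has_predecessor(0, 1, t=0))  # False: no 1 -> 0 edge was ever added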
GiulioRossetti/dynetx
dynetx/classes/dyndigraph.py
DynDiGraph.successors_iter
def successors_iter(self, n, t=None):
    """Return an iterator over successor nodes of n at time t (optional).

    Parameters
    ----------
    n : node
        Nodes can be, for example, strings or numbers.
        Nodes must be hashable (and not None) Python objects.
    t : snapshot id (default=None)
        If None, the successors on the flattened graph are returned.
    """
    try:
        if t is None:
            return iter(self._succ[n])
        else:
            return iter([i for i in self._succ[n] if self.__presence_test(n, i, t)])
    except KeyError:
        raise nx.NetworkXError("The node %s is not in the graph." % (n,))
python
https://github.com/GiulioRossetti/dynetx/blob/634e2b38f8950885aebfa079dad7d5e8d7563f1d/dynetx/classes/dyndigraph.py#L888-L906
GiulioRossetti/dynetx
dynetx/classes/dyndigraph.py
DynDiGraph.predecessors_iter
def predecessors_iter(self, n, t=None):
    """Return an iterator over predecessor nodes of n at time t (optional).

    Parameters
    ----------
    n : node
        Nodes can be, for example, strings or numbers.
        Nodes must be hashable (and not None) Python objects.
    t : snapshot id (default=None)
        If None, the predecessors on the flattened graph are returned.
    """
    try:
        if t is None:
            return iter(self._pred[n])
        else:
            return iter([i for i in self._pred[n] if self.__presence_test(i, n, t)])
    except KeyError:
        raise nx.NetworkXError("The node %s is not in the graph." % (n,))
python
https://github.com/GiulioRossetti/dynetx/blob/634e2b38f8950885aebfa079dad7d5e8d7563f1d/dynetx/classes/dyndigraph.py#L908-L927
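A sketch of time-filtered neighbor iteration with the two iterators above (assuming dn = dynetx; the flattened order follows insertion):

import dynetx as dn

G = dn.DynDiGraph()
G.add_interaction(0, 1, t=0)
G.add_interaction(0, 2, t=1)
print(list(G.successors_iter(0, t=0)))    # [1]: only the t=0 interaction survives the filter
print(list(G.successors_iter(0)))         # [1, 2]: flattened graph
print(list(G.predecessors_iter(1, t=0)))  # [0]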
GiulioRossetti/dynetx
dynetx/classes/dyndigraph.py
DynDiGraph.in_degree
def in_degree(self, nbunch=None, t=None):
    """Return the in degree of a node or nodes at time t.

    The node in degree is the number of incoming interactions to that
    node in a given time frame.

    Parameters
    ----------
    nbunch : iterable container, optional (default=all nodes)
        A container of nodes. The container will be iterated through once.
    t : snapshot id (default=None)
        If None, the degree of nodes on the flattened graph is returned.

    Returns
    -------
    nd : dictionary, or number
        A dictionary with nodes as keys and degree as values or
        a number if a single node is specified.

    Examples
    --------
    >>> G = dn.DynDiGraph()
    >>> G.add_interaction(0,1, t=0)
    >>> G.add_interaction(1,2, t=0)
    >>> G.add_interaction(2,3, t=0)
    >>> G.in_degree(0, t=0)
    0
    >>> G.in_degree([0,1], t=1)
    {0: 0, 1: 0}
    >>> list(G.in_degree([0,1], t=0).values())
    [0, 1]
    """
    if nbunch in self:
        # return the degree of a single node
        return next(self.in_degree_iter(nbunch, t))[1]
    else:
        # return a dict
        return dict(self.in_degree_iter(nbunch, t))
python
https://github.com/GiulioRossetti/dynetx/blob/634e2b38f8950885aebfa079dad7d5e8d7563f1d/dynetx/classes/dyndigraph.py#L961-L998
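The single-node versus dict return convention is easy to trip over; a sketch (assuming dn = dynetx):

import dynetx as dn

G = dn.DynDiGraph()
G.add_interaction(0, 1, t=0)
G.add_interaction(1, 2, t=0)
print(G.in_degree(1, t=0))       # 1: a plain number for a single node
print(G.in_degree([0, 1], t=0))  # {0: 0, 1: 1}: a dict for a node container
print(G.in_degree(t=0))          # in degree of every node at snapshot 0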
GiulioRossetti/dynetx
dynetx/classes/dyndigraph.py
DynDiGraph.in_degree_iter
def in_degree_iter(self, nbunch=None, t=None):
    """Return an iterator for (node, in_degree) at time t.

    The node in degree is the number of edges incoming to the node in a
    given time frame.

    Parameters
    ----------
    nbunch : iterable container, optional (default=all nodes)
        A container of nodes. The container will be iterated through once.
    t : snapshot id (default=None)
        If None, an iterator over the degree of nodes on the flattened
        graph is returned.

    Returns
    -------
    nd_iter : an iterator
        The iterator returns two-tuples of (node, degree).

    See Also
    --------
    degree

    Examples
    --------
    >>> G = dn.DynDiGraph()
    >>> G.add_interaction(0, 1, t=0)
    >>> list(G.in_degree_iter(0, t=0))
    [(0, 0)]
    >>> list(G.in_degree_iter([0,1], t=0))
    [(0, 0), (1, 1)]
    """
    if nbunch is None:
        nodes_nbrs = self._pred.items()
    else:
        nodes_nbrs = ((n, self._pred[n]) for n in self.nbunch_iter(nbunch))

    if t is None:
        for n, nbrs in nodes_nbrs:
            yield (n, len(nbrs))
    else:
        for n, nbrs in nodes_nbrs:
            # count only the incoming edges alive at snapshot t
            edges_t = len([v for v in nbrs.keys() if self.__presence_test(v, n, t)])
            yield (n, edges_t)
python
https://github.com/GiulioRossetti/dynetx/blob/634e2b38f8950885aebfa079dad7d5e8d7563f1d/dynetx/classes/dyndigraph.py#L1000-L1048
GiulioRossetti/dynetx
dynetx/classes/dyndigraph.py
DynDiGraph.out_degree
def out_degree(self, nbunch=None, t=None):
    """Return the out degree of a node or nodes at time t.

    The node out degree is the number of interactions outgoing from that
    node in a given time frame.

    Parameters
    ----------
    nbunch : iterable container, optional (default=all nodes)
        A container of nodes. The container will be iterated through once.
    t : snapshot id (default=None)
        If None, the degree of nodes on the flattened graph is returned.

    Returns
    -------
    nd : dictionary, or number
        A dictionary with nodes as keys and degree as values or
        a number if a single node is specified.

    Examples
    --------
    >>> G = dn.DynDiGraph()
    >>> G.add_interaction(0,1, t=0)
    >>> G.add_interaction(1,2, t=0)
    >>> G.add_interaction(2,3, t=0)
    >>> G.out_degree(0, t=0)
    1
    >>> G.out_degree([0,1], t=1)
    {0: 0, 1: 0}
    >>> list(G.out_degree([0,1], t=0).values())
    [1, 1]
    """
    if nbunch in self:
        # return the degree of a single node
        return next(self.out_degree_iter(nbunch, t))[1]
    else:
        # return a dict
        return dict(self.out_degree_iter(nbunch, t))
python
https://github.com/GiulioRossetti/dynetx/blob/634e2b38f8950885aebfa079dad7d5e8d7563f1d/dynetx/classes/dyndigraph.py#L1050-L1087
GiulioRossetti/dynetx
dynetx/classes/dyndigraph.py
DynDiGraph.out_degree_iter
def out_degree_iter(self, nbunch=None, t=None):
    """Return an iterator for (node, out_degree) at time t.

    The node out degree is the number of interactions outgoing from the
    node in a given time frame.

    Parameters
    ----------
    nbunch : iterable container, optional (default=all nodes)
        A container of nodes. The container will be iterated through once.
    t : snapshot id (default=None)
        If None, an iterator over the degree of nodes on the flattened
        graph is returned.

    Returns
    -------
    nd_iter : an iterator
        The iterator returns two-tuples of (node, degree).

    See Also
    --------
    degree

    Examples
    --------
    >>> G = dn.DynDiGraph()
    >>> G.add_interaction(0, 1, t=0)
    >>> list(G.out_degree_iter(0, t=0))
    [(0, 1)]
    >>> list(G.out_degree_iter([0,1], t=0))
    [(0, 1), (1, 0)]
    """
    if nbunch is None:
        nodes_nbrs = self._succ.items()
    else:
        nodes_nbrs = ((n, self._succ[n]) for n in self.nbunch_iter(nbunch))

    if t is None:
        for n, nbrs in nodes_nbrs:
            yield (n, len(nbrs))
    else:
        for n, nbrs in nodes_nbrs:
            # count only the outgoing edges alive at snapshot t
            edges_t = len([v for v in nbrs.keys() if self.__presence_test(n, v, t)])
            yield (n, edges_t)
python
https://github.com/GiulioRossetti/dynetx/blob/634e2b38f8950885aebfa079dad7d5e8d7563f1d/dynetx/classes/dyndigraph.py#L1089-L1137
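A sketch of the iterator variant (assuming dn = dynetx):

import dynetx as dn

G = dn.DynDiGraph()
G.add_interaction(0, 1, t=0)
G.add_interaction(0, 2, t=1)
print(list(G.out_degree_iter(0, t=0)))       # [(0, 1)]: only the t=0 edge counts
print(list(G.out_degree_iter([0, 1], t=0)))  # [(0, 1), (1, 0)]: zero degrees are reported too
print(list(G.out_degree_iter(0)))            # [(0, 2)]: flattened graph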
GiulioRossetti/dynetx
dynetx/classes/dyndigraph.py
DynDiGraph.to_undirected
def to_undirected(self, reciprocal=False):
    """Return an undirected representation of the dyndigraph.

    Parameters
    ----------
    reciprocal : bool (optional)
        If True, only keep edges that appear in both directions
        in the original dyndigraph.

    Returns
    -------
    G : DynGraph
        An undirected dynamic graph with the same name and nodes and
        with edge (u,v,data) if either (u,v,data) or (v,u,data)
        is in the dyndigraph. If both edges exist in the dyndigraph and
        their edge data is different, only one edge is created
        with an arbitrary choice of which edge data to use.
        You must check and correct for this manually if desired.

    Notes
    -----
    If edges in both directions (u,v) and (v,u) exist in the
    graph, attributes for the new undirected edge will be a combination of
    the attributes of the directed edges. The edge data is updated
    in the (arbitrary) order that the edges are encountered. For
    more customized control of the edge attributes use add_edge().

    This returns a "deepcopy" of the edge, node, and
    graph attributes which attempts to completely copy
    all of the data and references.

    This is in contrast to the similar G=DynDiGraph(D) which returns a
    shallow copy of the data.

    See the Python copy module for more information on shallow
    and deep copies, http://docs.python.org/library/copy.html.

    Warning: If you have subclassed DiGraph to use dict-like objects in the
    data structure, those changes do not transfer to the Graph
    created by this method.
    """
    from .dyngraph import DynGraph
    H = DynGraph()
    H.name = self.name
    H.add_nodes_from(self)

    if reciprocal is True:
        for u in self._node:
            for v in self._node:
                if u >= v:
                    try:
                        outc = self._succ[u][v]['t']
                        intc = self._pred[u][v]['t']
                        for o in outc:
                            r = set(range(o[0], o[1] + 1))
                            for i in intc:
                                r2 = set(range(i[0], i[1] + 1))
                                # sorted: set iteration order is not guaranteed,
                                # and inter[0]/inter[-1] must be the bounds
                                inter = sorted(r & r2)
                                if len(inter) == 1:
                                    H.add_interaction(u, v, t=inter[0])
                                elif len(inter) > 1:
                                    H.add_interaction(u, v, t=inter[0], e=inter[-1])
                    except KeyError:
                        # u and v share no reciprocal interaction
                        pass
    else:
        for it in self.interactions_iter():
            for t in it[2]['t']:
                H.add_interaction(it[0], it[1], t=t[0], e=t[1])

    H.graph = deepcopy(self.graph)
    H._node = deepcopy(self._node)
    return H
python
https://github.com/GiulioRossetti/dynetx/blob/634e2b38f8950885aebfa079dad7d5e8d7563f1d/dynetx/classes/dyndigraph.py#L1602-L1675
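A sketch of the two projection modes (assuming dn = dynetx and that DynGraph exposes the same has_interaction accessor; with reciprocal=True only edges alive in both directions at overlapping times survive):

import dynetx as dn

D = dn.DynDiGraph()
D.add_interaction(0, 1, t=0)   # 0 -> 1
D.add_interaction(1, 0, t=0)   # 1 -> 0: reciprocal at t=0
D.add_interaction(1, 2, t=0)   # one-way only

H = D.to_undirected()                 # every interaction, direction dropped
R = D.to_undirected(reciprocal=True)  # only the reciprocal 0 - 1 pair survives
print(H.has_interaction(1, 2, t=0))   # True
print(R.has_interaction(1, 2, t=0))   # False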
GiulioRossetti/dynetx
dynetx/readwrite/edgelist.py
write_interactions
def write_interactions(G, path, delimiter=' ', encoding='utf-8'):
    """Write a DyNetx graph in interaction list format.

    Parameters
    ----------
    G : graph
        A DyNetx graph.
    path : basestring
        The desired output filename.
    delimiter : character
        Column delimiter.
    """
    for line in generate_interactions(G, delimiter):
        line += '\n'
        path.write(line.encode(encoding))
python
https://github.com/GiulioRossetti/dynetx/blob/634e2b38f8950885aebfa079dad7d5e8d7563f1d/dynetx/readwrite/edgelist.py#L49-L67
GiulioRossetti/dynetx
dynetx/readwrite/edgelist.py
read_interactions
def read_interactions(path, comments="#", directed=False, delimiter=None,
                      nodetype=None, timestamptype=None, encoding='utf-8', keys=False):
    """Read a DyNetx graph from interaction list format.

    Parameters
    ----------
    path : basestring
        The desired input filename.
    delimiter : character
        Column delimiter.
    """
    ids = None
    lines = (line.decode(encoding) for line in path)
    if keys:
        ids = read_ids(path.name, delimiter=delimiter, timestamptype=timestamptype)

    return parse_interactions(lines, comments=comments, directed=directed, delimiter=delimiter,
                              nodetype=nodetype, timestamptype=timestamptype, keys=ids)
python
https://github.com/GiulioRossetti/dynetx/blob/634e2b38f8950885aebfa079dad7d5e8d7563f1d/dynetx/readwrite/edgelist.py#L71-L91
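A hedged round-trip sketch for the two functions above; it assumes both are re-exported at the dynetx package level and wrapped by the open_file decorator documented later in this section, so plain filenames can be passed:

import dynetx as dn

G = dn.DynGraph()
G.add_interaction(0, 1, t=0)
G.add_interaction(1, 2, t=1)

dn.write_interactions(G, "interactions.txt")
H = dn.read_interactions("interactions.txt", nodetype=int, timestamptype=int)
print(H.number_of_interactions())  # 2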
GiulioRossetti/dynetx
dynetx/readwrite/edgelist.py
write_snapshots
def write_snapshots(G, path, delimiter=' ', encoding='utf-8'):
    """Write a DyNetx graph in snapshot graph list format.

    Parameters
    ----------
    G : graph
        A DyNetx graph.
    path : basestring
        The desired output filename.
    delimiter : character
        Column delimiter.
    """
    for line in generate_snapshots(G, delimiter):
        line += '\n'
        path.write(line.encode(encoding))
python
https://github.com/GiulioRossetti/dynetx/blob/634e2b38f8950885aebfa079dad7d5e8d7563f1d/dynetx/readwrite/edgelist.py#L164-L182
GiulioRossetti/dynetx
dynetx/readwrite/edgelist.py
read_snapshots
def read_snapshots(path, comments="#", directed=False, delimiter=None,
                   nodetype=None, timestamptype=None, encoding='utf-8', keys=False):
    """Read a DyNetx graph from snapshot graph list format.

    Parameters
    ----------
    path : basestring
        The desired input filename.
    delimiter : character
        Column delimiter.
    """
    ids = None
    lines = (line.decode(encoding) for line in path)
    if keys:
        ids = read_ids(path.name, delimiter=delimiter, timestamptype=timestamptype)

    return parse_snapshots(lines, comments=comments, directed=directed, delimiter=delimiter,
                           nodetype=nodetype, timestamptype=timestamptype, keys=ids)
python
https://github.com/GiulioRossetti/dynetx/blob/634e2b38f8950885aebfa079dad7d5e8d7563f1d/dynetx/readwrite/edgelist.py#L237-L257
GiulioRossetti/dynetx
dynetx/utils/decorators.py
open_file
def open_file(path_arg, mode='r'):
    """Decorator to ensure clean opening and closing of files.

    Parameters
    ----------
    path_arg : int
        Location of the path argument in args. Even if the argument is a
        named positional argument (with a default value), you must specify
        its index as a positional argument.
    mode : str
        String for opening mode.

    Returns
    -------
    _open_file : function
        Function which cleanly executes the io.

    Examples
    --------
    Decorate functions like this::

       @open_file(0,'r')
       def read_function(pathname):
           pass

       @open_file(1,'w')
       def write_function(G, pathname):
           pass

       @open_file(1,'w')
       def write_function(G, pathname='graph.dot'):
           pass

       @open_file('path', 'w+')
       def another_function(arg, **kwargs):
           path = kwargs['path']
           pass
    """
    # Note that this decorator solves the problem when a path argument is
    # specified as a string, but it does not handle the situation when the
    # function wants to accept a default of None (and then handle it).
    # Here is an example:
    #
    # @open_file('path')
    # def some_function(arg1, arg2, path=None):
    #     if path is None:
    #         fobj = tempfile.NamedTemporaryFile(delete=False)
    #         close_fobj = True
    #     else:
    #         # `path` could have been a string or file object or something
    #         # similar. In any event, the decorator has given us a file
    #         # object and it will close it for us, if it should.
    #         fobj = path
    #         close_fobj = False
    #
    #     try:
    #         fobj.write('blah')
    #     finally:
    #         if close_fobj:
    #             fobj.close()
    #
    # Normally, we'd want to use "with" to ensure that fobj gets closed.
    # However, recall that the decorator will make `path` a file object for
    # us, and using "with" would undesirably close that file object.
    # Instead, you use a try block, as shown above. When we exit the
    # function, fobj will be closed, if it should be, by the decorator.

    @decorator
    def _open_file(func, *args, **kwargs):
        # Note that since we have used @decorator, *args and **kwargs have
        # already been resolved to match the function signature of func.
        # This means default values have been propagated. For example, the
        # function func(x, y, a=1, b=2, **kwargs), if called as
        # func(0, 1, b=5, c=10), would have args=(0, 1, 1, 5) and
        # kwargs={'c': 10}.

        # First we parse the arguments of the decorator. The path_arg could
        # be a positional argument or a keyword argument.
        try:
            # path_arg is a required positional argument.
            # This works precisely because we are using @decorator.
            path = args[path_arg]
        except TypeError:
            # path_arg is a keyword argument. It is "required" in the sense
            # that it must exist, according to the decorator specification.
            # It can exist in `kwargs` by a developer-specified default
            # value, or it could have been explicitly set by the user.
            try:
                path = kwargs[path_arg]
            except KeyError:
                # Could not find the keyword. Thus, no default was specified
                # in the function signature and the user did not provide it.
                msg = 'Missing required keyword argument: {0}'
                raise nx.NetworkXError(msg.format(path_arg))
            else:
                is_kwarg = True
        except IndexError:
            # A "required" argument was missing. This can only happen if
            # the decorator of the function was incorrectly specified.
            # So this probably is not a user error, but a developer error.
            msg = "path_arg of open_file decorator is incorrect"
            raise nx.NetworkXError(msg)
        else:
            is_kwarg = False

        # Now we have the path_arg. There are two types of input to consider:
        #   1) a string representing a path that should be opened
        #   2) an already opened file object
        if is_string_like(path):
            ext = splitext(path)[1]
            fobj = _dispatch_dict[ext](path, mode=mode)
            close_fobj = True
        elif hasattr(path, 'read'):
            # path is already a file-like object
            fobj = path
            close_fobj = False
        else:
            # could be None, in which case the algorithm will deal with it
            fobj = path
            close_fobj = False

        # Insert the file object into args or kwargs.
        if is_kwarg:
            new_args = args
            kwargs[path_arg] = fobj
        else:
            # args is a tuple, so we must convert to list before modifying it.
            new_args = list(args)
            new_args[path_arg] = fobj

        # Finally, we call the original function, making sure to close fobj.
        try:
            result = func(*new_args, **kwargs)
        finally:
            if close_fobj:
                fobj.close()

        return result

    return _open_file
python
https://github.com/GiulioRossetti/dynetx/blob/634e2b38f8950885aebfa079dad7d5e8d7563f1d/dynetx/utils/decorators.py#L62-L201
GiulioRossetti/dynetx
dynetx/classes/function.py
number_of_interactions
def number_of_interactions(G, u=None, v=None, t=None):
    """Return the number of edges between two nodes at time t.

    Parameters
    ----------
    u, v : nodes, optional (default=all edges)
        If u and v are specified, return the number of edges between
        u and v. Otherwise return the total number of all edges.
    t : snapshot id (default=None)
        If None, the number of edges on the flattened graph is returned.

    Returns
    -------
    nedges : int
        The number of edges in the graph. If nodes u and v are specified,
        return the number of edges between those nodes.

    Examples
    --------
    >>> G = dn.DynGraph()
    >>> G.add_path([0,1,2,3], t=0)
    >>> dn.number_of_interactions(G, t=0)
    """
    return G.number_of_interactions(u, v, t)
python
https://github.com/GiulioRossetti/dynetx/blob/634e2b38f8950885aebfa079dad7d5e8d7563f1d/dynetx/classes/function.py#L211-L235
GiulioRossetti/dynetx
dynetx/classes/function.py
density
def density(G, t=None):
    r"""Return the density of a graph at timestamp t.

    The density for undirected graphs is

    .. math::

       d = \frac{2m}{n(n-1)},

    and for directed graphs is

    .. math::

       d = \frac{m}{n(n-1)},

    where `n` is the number of nodes and `m` is the number of edges in `G`.

    Parameters
    ----------
    G : Graph object
        DyNetx graph object
    t : snapshot id (default=None)
        If None, the density will be computed on the flattened graph.

    Notes
    -----
    The density is 0 for a graph without edges and 1 for a complete graph.

    Self loops are counted in the total number of edges so graphs with
    self loops can have density higher than 1.
    """
    n = number_of_nodes(G, t)
    # pass t by keyword: passed positionally it would be bound to u
    # and the edge count would silently come back as None
    m = number_of_interactions(G, t=t)
    if m == 0 or m is None or n <= 1:
        return 0
    d = m / (n * (n - 1))
    if not G.is_directed():
        d *= 2
    return d
python
https://github.com/GiulioRossetti/dynetx/blob/634e2b38f8950885aebfa079dad7d5e8d7563f1d/dynetx/classes/function.py#L238-L279
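A small check of the formula above (assuming dn = dynetx and the keyword fix for t noted in the code):

import dynetx as dn

G = dn.DynGraph()
G.add_interaction(0, 1, t=0)
G.add_interaction(1, 2, t=1)
print(dn.density(G))       # 2*2 / (3*2) = 0.666..., undirected flattened graph
print(dn.density(G, t=0))  # density of the t=0 snapshot alone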
GiulioRossetti/dynetx
dynetx/classes/function.py
degree_histogram
def degree_histogram(G, t=None):
    """Return a list of the frequency of each degree value.

    Parameters
    ----------
    G : Graph object
        DyNetx graph object
    t : snapshot id (default=None)
        snapshot id

    Returns
    -------
    hist : list
        A list of frequencies of degrees.
        The degree values are the index in the list.

    Notes
    -----
    Note: the bins are width one, hence len(list) can be large
    (Order(number_of_edges))
    """
    counts = Counter(d for n, d in G.degree(t=t).items())
    return [counts.get(i, 0) for i in range(max(counts) + 1)]
python
https://github.com/GiulioRossetti/dynetx/blob/634e2b38f8950885aebfa079dad7d5e8d7563f1d/dynetx/classes/function.py#L282-L308
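A sketch of the histogram indexing (assuming dn = dynetx):

import dynetx as dn

G = dn.DynGraph()
G.add_interaction(0, 1, t=0)
G.add_interaction(1, 2, t=0)
print(dn.degree_histogram(G, t=0))  # [0, 2, 1]: no isolates, two degree-1 nodes, one degree-2 node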
GiulioRossetti/dynetx
dynetx/classes/function.py
freeze
def freeze(G):
    """Modify graph to prevent further change by adding or removing
    nodes or edges.

    Node and edge data can still be modified.

    Parameters
    ----------
    G : graph
        A NetworkX graph

    Notes
    -----
    To "unfreeze" a graph you must make a copy by creating a new graph object.

    See Also
    --------
    is_frozen
    """
    G.add_node = frozen
    G.add_nodes_from = frozen
    G.remove_node = frozen
    G.remove_nodes_from = frozen
    G.add_edge = frozen
    G.add_edges_from = frozen
    G.remove_edge = frozen
    G.remove_edges_from = frozen
    G.clear = frozen
    G.frozen = True
    return G
python
https://github.com/GiulioRossetti/dynetx/blob/634e2b38f8950885aebfa079dad7d5e8d7563f1d/dynetx/classes/function.py#L321-L351
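A sketch of freezing (assuming dn = dynetx). Note that the method list above covers the NetworkX-style mutators; dynetx-specific mutators such as add_interactions_from are not intercepted by this snippet:

import dynetx as dn

G = dn.DynGraph()
G.add_interaction(0, 1, t=0)
dn.freeze(G)
G.add_node(2)  # raises: add_node was replaced by the frozen stub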
GiulioRossetti/dynetx
dynetx/classes/function.py
add_star
def add_star(G, nodes, t, **attr):
    """Add a star at time t.

    The first node in nodes is the middle of the star.
    It is connected to all other nodes.

    Parameters
    ----------
    G : graph
        A DyNetx graph
    nodes : iterable container
        A container of nodes.
    t : snapshot id
        snapshot id

    See Also
    --------
    add_path, add_cycle

    Examples
    --------
    >>> G = dn.DynGraph()
    >>> dn.add_star(G, [0,1,2,3], t=0)
    """
    nlist = iter(nodes)
    v = next(nlist)
    edges = ((v, n) for n in nlist)
    G.add_interactions_from(edges, t, **attr)
python
https://github.com/GiulioRossetti/dynetx/blob/634e2b38f8950885aebfa079dad7d5e8d7563f1d/dynetx/classes/function.py#L372-L401
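A sketch of the hub-and-spokes layout (assuming dn = dynetx):

import dynetx as dn

G = dn.DynGraph()
dn.add_star(G, [0, 1, 2, 3], t=0)  # node 0 is the hub: edges 0-1, 0-2, 0-3
print(G.degree(0, t=0))            # 3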
GiulioRossetti/dynetx
dynetx/classes/function.py
add_path
def add_path(G, nodes, t, **attr):
    """Add a path at time t.

    Parameters
    ----------
    G : graph
        A DyNetx graph
    nodes : iterable container
        A container of nodes.
    t : snapshot id
        snapshot id

    See Also
    --------
    add_star, add_cycle

    Examples
    --------
    >>> G = dn.DynGraph()
    >>> dn.add_path(G, [0,1,2,3], t=0)
    """
    nlist = list(nodes)
    edges = zip(nlist[:-1], nlist[1:])
    G.add_interactions_from(edges, t, **attr)
python
https://github.com/GiulioRossetti/dynetx/blob/634e2b38f8950885aebfa079dad7d5e8d7563f1d/dynetx/classes/function.py#L404-L429
GiulioRossetti/dynetx
dynetx/classes/function.py
create_empty_copy
def create_empty_copy(G, with_data=True):
    """Return a copy of the graph G with all of the edges removed.

    Parameters
    ----------
    G : graph
        A DyNetx graph
    with_data : bool (default=True)
        Include data.

    Notes
    -----
    Edge data is not propagated to the new graph; node and graph
    attributes are kept when `with_data` is True.
    """
    H = G.__class__()
    H.add_nodes_from(G.nodes(data=with_data))
    if with_data:
        H.graph.update(G.graph)
    return H
python
https://github.com/GiulioRossetti/dynetx/blob/634e2b38f8950885aebfa079dad7d5e8d7563f1d/dynetx/classes/function.py#L481-L500
GiulioRossetti/dynetx
dynetx/classes/function.py
set_node_attributes
def set_node_attributes(G, values, name=None):
    """Set node attributes from dictionary of nodes and values

    Parameters
    ----------
    G : DyNetx Graph
    name : string
        Attribute name
    values : dict
        Dictionary of attribute values keyed by node. If `values` is not a
        dictionary, then it is treated as a single attribute value that is
        then applied to every node in `G`.
    """
    # Set node attributes based on type of `values`
    if name is not None:
        # `values` must not be a dict of dict
        try:
            # `values` is a dict
            for n, v in values.items():
                try:
                    G.node[n][name] = v
                except KeyError:
                    pass
        except AttributeError:
            # `values` is a constant
            for n in G:
                G.node[n][name] = values
    else:
        # `values` must be a dict of dict
        for n, d in values.items():
            try:
                G.node[n].update(d)
            except KeyError:
                pass
python
def set_node_attributes(G, values, name=None): """Set node attributes from dictionary of nodes and values Parameters ---------- G : DyNetx Graph name : string Attribute name values: dict Dictionary of attribute values keyed by node. If `values` is not a dictionary, then it is treated as a single attribute value that is then applied to every node in `G`. """ # Set node attributes based on type of `values` if name is not None: # `values` must not be a dict of dict try: # `values` is a dict for n, v in values.items(): try: G.node[n][name] = values[n] except KeyError: pass except AttributeError: # `values` is a constant for n in G: G.node[n][name] = values else: # `values` must be dict of dict for n, d in values.items(): try: G.node[n].update(d) except KeyError: pass
Set node attributes from dictionary of nodes and values Parameters ---------- G : DyNetx Graph name : string Attribute name values: dict Dictionary of attribute values keyed by node. If `values` is not a dictionary, then it is treated as a single attribute value that is then applied to every node in `G`.
https://github.com/GiulioRossetti/dynetx/blob/634e2b38f8950885aebfa079dad7d5e8d7563f1d/dynetx/classes/function.py#L503-L535
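A hedged sketch of set_node_attributes covering both forms the docstring describes, a single constant value and a per-node mapping; the attribute name and values are made up:

import dynetx as dn

G = dn.DynGraph()
dn.add_path(G, [1, 2, 3], t=0)
dn.set_node_attributes(G, 'unlabeled', name='role')            # same value for every node
dn.set_node_attributes(G, {1: 'hub', 3: 'leaf'}, name='role')  # per-node values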
GiulioRossetti/dynetx
dynetx/classes/function.py
get_node_attributes
def get_node_attributes(G, name): """Get node attributes from graph Parameters ---------- G : DyNetx Graph name : string Attribute name Returns ------- Dictionary of attributes keyed by node. """ return {n: d[name] for n, d in G.node.items() if name in d}
python
def get_node_attributes(G, name): """Get node attributes from graph Parameters ---------- G : DyNetx Graph name : string Attribute name Returns ------- Dictionary of attributes keyed by node. """ return {n: d[name] for n, d in G.node.items() if name in d}
Get node attributes from graph Parameters ---------- G : DyNetx Graph name : string Attribute name Returns ------- Dictionary of attributes keyed by node.
https://github.com/GiulioRossetti/dynetx/blob/634e2b38f8950885aebfa079dad7d5e8d7563f1d/dynetx/classes/function.py#L538-L552
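Continuing the set_node_attributes sketch above, get_node_attributes inverts it, returning only the nodes that actually carry the attribute:

roles = dn.get_node_attributes(G, 'role')
# {1: 'hub', 2: 'unlabeled', 3: 'leaf'}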
GiulioRossetti/dynetx
dynetx/classes/function.py
all_neighbors
def all_neighbors(graph, node, t=None): """ Returns all of the neighbors of a node in the graph at time t. If the graph is directed returns predecessors as well as successors. Parameters ---------- graph : DyNetx graph Graph to find neighbors. node : node The node whose neighbors will be returned. t : snapshot id (default=None) If None the neighbors are identified on the flattened graph. Returns ------- neighbors : iterator Iterator of neighbors """ if graph.is_directed(): values = chain(graph.predecessors(node, t=t), graph.successors(node, t=t)) else: values = graph.neighbors(node, t=t) return values
python
def all_neighbors(graph, node, t=None): """ Returns all of the neighbors of a node in the graph at time t. If the graph is directed returns predecessors as well as successors. Parameters ---------- graph : DyNetx graph Graph to find neighbors. node : node The node whose neighbors will be returned. t : snapshot id (default=None) If None the neighbors are identified on the flattened graph. Returns ------- neighbors : iterator Iterator of neighbors """ if graph.is_directed(): values = chain(graph.predecessors(node, t=t), graph.successors(node, t=t)) else: values = graph.neighbors(node, t=t) return values
Returns all of the neighbors of a node in the graph at time t. If the graph is directed returns predecessors as well as successors. Parameters ---------- graph : DyNetx graph Graph to find neighbors. node : node The node whose neighbors will be returned. t : snapshot id (default=None) If None the neighbors are identified on the flattened graph. Returns ------- neighbors : iterator Iterator of neighbors
https://github.com/GiulioRossetti/dynetx/blob/634e2b38f8950885aebfa079dad7d5e8d7563f1d/dynetx/classes/function.py#L565-L592
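A small all_neighbors sketch on an undirected dynamic graph (per the code above, directed graphs would also pull in predecessors); same package-level import assumption:

import dynetx as dn

G = dn.DynGraph()
dn.add_path(G, [0, 1, 2], t=0)
print(list(dn.all_neighbors(G, 1, t=0)))  # [0, 2]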
GiulioRossetti/dynetx
dynetx/classes/function.py
non_neighbors
def non_neighbors(graph, node, t=None): """Returns the non-neighbors of the node in the graph at time t. Parameters ---------- graph : DyNetx graph Graph to find neighbors. node : node The node whose neighbors will be returned. t : snapshot id (default=None) If None the non-neighbors are identified on the flattened graph. Returns ------- non_neighbors : iterator Iterator of nodes in the graph that are not neighbors of the node. """ if graph.is_directed(): values = chain(graph.predecessors(node, t=t), graph.successors(node, t=t)) else: values = graph.neighbors(node, t=t) nbors = set(values) | {node} return (nnode for nnode in graph if nnode not in nbors)
python
def non_neighbors(graph, node, t=None): """Returns the non-neighbors of the node in the graph at time t. Parameters ---------- graph : DyNetx graph Graph to find neighbors. node : node The node whose neighbors will be returned. t : snapshot id (default=None) If None the non-neighbors are identified on the flattened graph. Returns ------- non_neighbors : iterator Iterator of nodes in the graph that are not neighbors of the node. """ if graph.is_directed(): values = chain(graph.predecessors(node, t=t), graph.successors(node, t=t)) else: values = graph.neighbors(node, t=t) nbors = set(values) | {node} return (nnode for nnode in graph if nnode not in nbors)
Returns the non-neighbors of the node in the graph at time t. Parameters ---------- graph : DyNetx graph Graph to find neighbors. node : node The node whose neighbors will be returned. t : snapshot id (default=None) If None the non-neighbors are identified on the flattened graph. Returns ------- non_neighbors : iterator Iterator of nodes in the graph that are not neighbors of the node.
https://github.com/GiulioRossetti/dynetx/blob/634e2b38f8950885aebfa079dad7d5e8d7563f1d/dynetx/classes/function.py#L595-L621
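And the complement: with the same import assumption, non_neighbors yields every node that 1 is not adjacent to, excluding 1 itself:

import dynetx as dn

G = dn.DynGraph()
dn.add_path(G, [0, 1, 2, 3], t=0)
print(list(dn.non_neighbors(G, 1)))  # [3] on the flattened graph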
GiulioRossetti/dynetx
dynetx/classes/function.py
non_interactions
def non_interactions(graph, t=None): """Returns the non-existent edges in the graph at time t. Parameters ---------- graph : NetworkX graph. Graph to find non-existent edges. t : snapshot id (default=None) If None the non-existent edges are identified on the flattened graph. Returns ------- non_edges : iterator Iterator of edges that are not in the graph. """ # if graph.is_directed(): # for u in graph: # for v in non_neighbors(graph, u, t): # yield (u, v) #else: nodes = set(graph) while nodes: u = nodes.pop() for v in nodes - set(graph[u]): yield (u, v)
python
def non_interactions(graph, t=None): """Returns the non-existent edges in the graph at time t. Parameters ---------- graph : NetworkX graph. Graph to find non-existent edges. t : snapshot id (default=None) If None the non-existent edges are identified on the flattened graph. Returns ------- non_edges : iterator Iterator of edges that are not in the graph. """ # if graph.is_directed(): # for u in graph: # for v in non_neighbors(graph, u, t): # yield (u, v) #else: nodes = set(graph) while nodes: u = nodes.pop() for v in nodes - set(graph[u]): yield (u, v)
Returns the non-existent edges in the graph at time t. Parameters ---------- graph : NetworkX graph. Graph to find non-existent edges. t : snapshot id (default=None) If None the non-existent edges are identified on the flattened graph. Returns ------- non_edges : iterator Iterator of edges that are not in the graph.
https://github.com/GiulioRossetti/dynetx/blob/634e2b38f8950885aebfa079dad7d5e8d7563f1d/dynetx/classes/function.py#L624-L651
kata198/func_timeout
func_timeout/exceptions.py
FunctionTimedOut.getMsg
def getMsg(self): ''' getMsg - Generate a default message based on parameters to FunctionTimedOut exception @return <str> - Message ''' return 'Function %s (args=%s) (kwargs=%s) timed out after %f seconds.\n' %(self.timedOutFunction.__name__, repr(self.timedOutArgs), repr(self.timedOutKwargs), self.timedOutAfter)
python
def getMsg(self): ''' getMsg - Generate a default message based on parameters to FunctionTimedOut exception @return <str> - Message ''' return 'Function %s (args=%s) (kwargs=%s) timed out after %f seconds.\n' %(self.timedOutFunction.__name__, repr(self.timedOutArgs), repr(self.timedOutKwargs), self.timedOutAfter)
getMsg - Generate a default message based on parameters to FunctionTimedOut exception @return <str> - Message
https://github.com/kata198/func_timeout/blob/b427da2517266b31aa0d17c46e9cbeb5add8ef73/func_timeout/exceptions.py#L43-L49
kata198/func_timeout
func_timeout/exceptions.py
FunctionTimedOut.retry
def retry(self, timeout=RETRY_SAME_TIMEOUT): ''' retry - Retry the timed-out function with same arguments. @param timeout <float/RETRY_SAME_TIMEOUT/None> Default RETRY_SAME_TIMEOUT If RETRY_SAME_TIMEOUT : Will retry the function same args, same timeout If a float/int : Will retry the function same args with provided timeout If None : Will retry function same args no timeout @return - Returnval from function ''' if timeout is None: return self.timedOutFunction(*(self.timedOutArgs), **self.timedOutKwargs) from .dafunc import func_timeout if timeout == RETRY_SAME_TIMEOUT: timeout = self.timedOutAfter return func_timeout(timeout, self.timedOutFunction, args=self.timedOutArgs, kwargs=self.timedOutKwargs)
python
def retry(self, timeout=RETRY_SAME_TIMEOUT): ''' retry - Retry the timed-out function with same arguments. @param timeout <float/RETRY_SAME_TIMEOUT/None> Default RETRY_SAME_TIMEOUT If RETRY_SAME_TIMEOUT : Will retry the function same args, same timeout If a float/int : Will retry the function same args with provided timeout If None : Will retry function same args no timeout @return - Returnval from function ''' if timeout is None: return self.timedOutFunction(*(self.timedOutArgs), **self.timedOutKwargs) from .dafunc import func_timeout if timeout == RETRY_SAME_TIMEOUT: timeout = self.timedOutAfter return func_timeout(timeout, self.timedOutFunction, args=self.timedOutArgs, kwargs=self.timedOutKwargs)
retry - Retry the timed-out function with same arguments. @param timeout <float/RETRY_SAME_TIMEOUT/None> Default RETRY_SAME_TIMEOUT If RETRY_SAME_TIMEOUT : Will retry the function same args, same timeout If a float/int : Will retry the function same args with provided timeout If None : Will retry function same args no timeout @return - Returnval from function
https://github.com/kata198/func_timeout/blob/b427da2517266b31aa0d17c46e9cbeb5add8ef73/func_timeout/exceptions.py#L51-L71
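A sketch of catching FunctionTimedOut and retrying with a longer limit; the sleep-based worker is purely illustrative:

import time
from func_timeout import func_timeout, FunctionTimedOut

def slow_task(seconds):
    time.sleep(seconds)
    return 'done'

try:
    result = func_timeout(1, slow_task, args=(3,))
except FunctionTimedOut as exc:
    result = exc.retry(timeout=10)  # same args, new 10-second limit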
kata198/func_timeout
func_timeout/dafunc.py
func_timeout
def func_timeout(timeout, func, args=(), kwargs=None): ''' func_timeout - Runs the given function for up to #timeout# seconds. Raises any exceptions #func# would raise, returns what #func# would return (unless timeout is exceeded), in which case it raises FunctionTimedOut @param timeout <float> - Maximum number of seconds to run #func# before terminating @param func <function> - The function to call @param args <tuple> - Any ordered arguments to pass to the function @param kwargs <dict/None> - Keyword arguments to pass to the function. @raises - FunctionTimedOut if #timeout# is exceeded, otherwise anything #func# could raise will be raised If the timeout is exceeded, FunctionTimedOut will be raised within the context of the called function every two seconds until it terminates, but will not block the calling thread (a new thread will be created to perform the join). If possible, you should try/except FunctionTimedOut to return cleanly, but in most cases it will 'just work'. @return - The return value that #func# gives ''' if not kwargs: kwargs = {} if not args: args = () ret = [] exception = [] isStopped = False def funcwrap(args2, kwargs2): try: ret.append( func(*args2, **kwargs2) ) except FunctionTimedOut: # Don't print traceback to stderr if we time out pass except Exception as e: exc_info = sys.exc_info() if isStopped is False: # Assemble the alternate traceback, excluding this function # from the trace (by going to next frame) # Python3 reads native from __traceback__, # python2 has a different form for "raise" e.__traceback__ = exc_info[2].tb_next exception.append( e ) thread = StoppableThread(target=funcwrap, args=(args, kwargs)) thread.daemon = True thread.start() thread.join(timeout) stopException = None if thread.isAlive(): isStopped = True class FunctionTimedOutTempType(FunctionTimedOut): def __init__(self): return FunctionTimedOut.__init__(self, '', timeout, func, args, kwargs) FunctionTimedOutTemp = type('FunctionTimedOut' + str( hash( "%d_%d_%d_%d" %(id(timeout), id(func), id(args), id(kwargs))) ), FunctionTimedOutTempType.__bases__, dict(FunctionTimedOutTempType.__dict__)) stopException = FunctionTimedOutTemp thread._stopThread(stopException) thread.join(min(.1, timeout / 50.0)) raise FunctionTimedOut('', timeout, func, args, kwargs) else: # We can still cleanup the thread here.. # Still give a timeout... just... cuz.. thread.join(.5) if exception: raise_exception(exception) if ret: return ret[0]
python
def func_timeout(timeout, func, args=(), kwargs=None): ''' func_timeout - Runs the given function for up to #timeout# seconds. Raises any exceptions #func# would raise, returns what #func# would return (unless timeout is exceeded), in which case it raises FunctionTimedOut @param timeout <float> - Maximum number of seconds to run #func# before terminating @param func <function> - The function to call @param args <tuple> - Any ordered arguments to pass to the function @param kwargs <dict/None> - Keyword arguments to pass to the function. @raises - FunctionTimedOut if #timeout# is exceeded, otherwise anything #func# could raise will be raised If the timeout is exceeded, FunctionTimedOut will be raised within the context of the called function every two seconds until it terminates, but will not block the calling thread (a new thread will be created to perform the join). If possible, you should try/except FunctionTimedOut to return cleanly, but in most cases it will 'just work'. @return - The return value that #func# gives ''' if not kwargs: kwargs = {} if not args: args = () ret = [] exception = [] isStopped = False def funcwrap(args2, kwargs2): try: ret.append( func(*args2, **kwargs2) ) except FunctionTimedOut: # Don't print traceback to stderr if we time out pass except Exception as e: exc_info = sys.exc_info() if isStopped is False: # Assemble the alternate traceback, excluding this function # from the trace (by going to next frame) # Python3 reads native from __traceback__, # python2 has a different form for "raise" e.__traceback__ = exc_info[2].tb_next exception.append( e ) thread = StoppableThread(target=funcwrap, args=(args, kwargs)) thread.daemon = True thread.start() thread.join(timeout) stopException = None if thread.isAlive(): isStopped = True class FunctionTimedOutTempType(FunctionTimedOut): def __init__(self): return FunctionTimedOut.__init__(self, '', timeout, func, args, kwargs) FunctionTimedOutTemp = type('FunctionTimedOut' + str( hash( "%d_%d_%d_%d" %(id(timeout), id(func), id(args), id(kwargs))) ), FunctionTimedOutTempType.__bases__, dict(FunctionTimedOutTempType.__dict__)) stopException = FunctionTimedOutTemp thread._stopThread(stopException) thread.join(min(.1, timeout / 50.0)) raise FunctionTimedOut('', timeout, func, args, kwargs) else: # We can still cleanup the thread here.. # Still give a timeout... just... cuz.. thread.join(.5) if exception: raise_exception(exception) if ret: return ret[0]
func_timeout - Runs the given function for up to #timeout# seconds. Raises any exceptions #func# would raise, returns what #func# would return (unless timeout is exceeded), in which case it raises FunctionTimedOut @param timeout <float> - Maximum number of seconds to run #func# before terminating @param func <function> - The function to call @param args <tuple> - Any ordered arguments to pass to the function @param kwargs <dict/None> - Keyword arguments to pass to the function. @raises - FunctionTimedOut if #timeout# is exceeded, otherwise anything #func# could raise will be raised If the timeout is exceeded, FunctionTimedOut will be raised within the context of the called function every two seconds until it terminates, but will not block the calling thread (a new thread will be created to perform the join). If possible, you should try/except FunctionTimedOut to return cleanly, but in most cases it will 'just work'. @return - The return value that #func# gives
https://github.com/kata198/func_timeout/blob/b427da2517266b31aa0d17c46e9cbeb5add8ef73/func_timeout/dafunc.py#L33-L111
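The basic call pattern for func_timeout itself; everything but the API shown above is illustrative:

from func_timeout import func_timeout

def add(a, b):
    return a + b

print(func_timeout(5, add, args=(2, 3)))  # 5 -- returns well under the limit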
kata198/func_timeout
func_timeout/dafunc.py
func_set_timeout
def func_set_timeout(timeout, allowOverride=False): ''' func_set_timeout - Decorator to run a function with a given/calculated timeout (max execution time). Optionally (if #allowOverride is True), adds a parameter, "forceTimeout", to the function which, if provided, will override the default timeout for that invocation. If #timeout is provided as a lambda/function, it will be called prior to each invocation of the decorated function to calculate the timeout to be used for that call, based on the arguments passed to the decorated function. For example, you may have a "processData" function whose execution time depends on the number of "data" elements, so you may want a million elements to have a much higher timeout than seven elements. If #allowOverride is True AND a kwarg of "forceTimeout" is passed to the wrapped function, that timeout will be used for that single call. @param timeout <float OR lambda/function> - **If float:** Default number of seconds max to allow function to execute before throwing FunctionTimedOut **If lambda/function: If a function/lambda is provided, it will be called for every invocation of the decorated function (unless #allowOverride=True and "forceTimeout" was passed) to determine the timeout to use based on the arguments to the decorated function. The arguments as passed into the decorated function will be passed to this function. They either must match exactly to what the decorated function has, OR if you prefer to get the *args (list of ordered args) and **kwargs ( key : value keyword args form), define your calculate function like: def calculateTimeout(*args, **kwargs): ... or lambda like: calculateTimeout = lambda *args, **kwargs : ... otherwise the args to your calculate function should match exactly the decorated function. @param allowOverride <bool> Default False, if True adds a keyword argument to the decorated function, "forceTimeout" which, if provided, will override the #timeout. If #timeout was provided as a lambda / function, it will not be called. @throws FunctionTimedOut If time allotted passes without function returning naturally @see func_timeout ''' # Try to be as efficient as possible... don't compare the args more than once # Helps closure issue on some versions of python defaultTimeout = copy.copy(timeout) isTimeoutAFunction = bool( issubclass(timeout.__class__, (types.FunctionType, types.MethodType, types.LambdaType, types.BuiltinFunctionType, types.BuiltinMethodType) ) ) if not isTimeoutAFunction: if not issubclass(timeout.__class__, (float, int)): try: timeout = float(timeout) except: raise ValueError('timeout argument must be a float/int for number of seconds, or a function/lambda which gets passed the function arguments and returns a calculated timeout (as float or int). Passed type: < %s > is not of any of these, and cannot be converted to a float.' %( timeout.__class__.__name__, )) if not allowOverride and not isTimeoutAFunction: # Only defaultTimeout provided. Simple function wrapper def _function_decorator(func): return wraps(func)(lambda *args, **kwargs : func_timeout(defaultTimeout, func, args=args, kwargs=kwargs)) # def _function_wrapper(*args, **kwargs): # return func_timeout(defaultTimeout, func, args=args, kwargs=kwargs) # return _function_wrapper return _function_decorator if not isTimeoutAFunction: # allowOverride is True and timeout is not a function. Simple conditional on every call def _function_decorator(func): def _function_wrapper(*args, **kwargs): if 'forceTimeout' in kwargs: useTimeout = kwargs.pop('forceTimeout') else: useTimeout = defaultTimeout return func_timeout(useTimeout, func, args=args, kwargs=kwargs) return wraps(func)(_function_wrapper) return _function_decorator # At this point, timeout IS known to be a function. timeoutFunction = timeout if allowOverride: # Could use a lambda here... but want traceback to highlight the calculate function, # and not the invoked function def _function_decorator(func): def _function_wrapper(*args, **kwargs): if 'forceTimeout' in kwargs: useTimeout = kwargs.pop('forceTimeout') else: useTimeout = timeoutFunction(*args, **kwargs) return func_timeout(useTimeout, func, args=args, kwargs=kwargs) return wraps(func)(_function_wrapper) return _function_decorator # Cannot override, and calculate timeout function def _function_decorator(func): def _function_wrapper(*args, **kwargs): useTimeout = timeoutFunction(*args, **kwargs) return func_timeout(useTimeout, func, args=args, kwargs=kwargs) return wraps(func)(_function_wrapper) return _function_decorator
python
def func_set_timeout(timeout, allowOverride=False): ''' func_set_timeout - Decorator to run a function with a given/calculated timeout (max execution time). Optionally (if #allowOverride is True), adds a parameter, "forceTimeout", to the function which, if provided, will override the default timeout for that invocation. If #timeout is provided as a lambda/function, it will be called prior to each invocation of the decorated function to calculate the timeout to be used for that call, based on the arguments passed to the decorated function. For example, you may have a "processData" function whose execution time depends on the number of "data" elements, so you may want a million elements to have a much higher timeout than seven elements. If #allowOverride is True AND a kwarg of "forceTimeout" is passed to the wrapped function, that timeout will be used for that single call. @param timeout <float OR lambda/function> - **If float:** Default number of seconds max to allow function to execute before throwing FunctionTimedOut **If lambda/function: If a function/lambda is provided, it will be called for every invocation of the decorated function (unless #allowOverride=True and "forceTimeout" was passed) to determine the timeout to use based on the arguments to the decorated function. The arguments as passed into the decorated function will be passed to this function. They either must match exactly to what the decorated function has, OR if you prefer to get the *args (list of ordered args) and **kwargs ( key : value keyword args form), define your calculate function like: def calculateTimeout(*args, **kwargs): ... or lambda like: calculateTimeout = lambda *args, **kwargs : ... otherwise the args to your calculate function should match exactly the decorated function. @param allowOverride <bool> Default False, if True adds a keyword argument to the decorated function, "forceTimeout" which, if provided, will override the #timeout. If #timeout was provided as a lambda / function, it will not be called. @throws FunctionTimedOut If time allotted passes without function returning naturally @see func_timeout ''' # Try to be as efficient as possible... don't compare the args more than once # Helps closure issue on some versions of python defaultTimeout = copy.copy(timeout) isTimeoutAFunction = bool( issubclass(timeout.__class__, (types.FunctionType, types.MethodType, types.LambdaType, types.BuiltinFunctionType, types.BuiltinMethodType) ) ) if not isTimeoutAFunction: if not issubclass(timeout.__class__, (float, int)): try: timeout = float(timeout) except: raise ValueError('timeout argument must be a float/int for number of seconds, or a function/lambda which gets passed the function arguments and returns a calculated timeout (as float or int). Passed type: < %s > is not of any of these, and cannot be converted to a float.' %( timeout.__class__.__name__, )) if not allowOverride and not isTimeoutAFunction: # Only defaultTimeout provided. Simple function wrapper def _function_decorator(func): return wraps(func)(lambda *args, **kwargs : func_timeout(defaultTimeout, func, args=args, kwargs=kwargs)) # def _function_wrapper(*args, **kwargs): # return func_timeout(defaultTimeout, func, args=args, kwargs=kwargs) # return _function_wrapper return _function_decorator if not isTimeoutAFunction: # allowOverride is True and timeout is not a function. Simple conditional on every call def _function_decorator(func): def _function_wrapper(*args, **kwargs): if 'forceTimeout' in kwargs: useTimeout = kwargs.pop('forceTimeout') else: useTimeout = defaultTimeout return func_timeout(useTimeout, func, args=args, kwargs=kwargs) return wraps(func)(_function_wrapper) return _function_decorator # At this point, timeout IS known to be a function. timeoutFunction = timeout if allowOverride: # Could use a lambda here... but want traceback to highlight the calculate function, # and not the invoked function def _function_decorator(func): def _function_wrapper(*args, **kwargs): if 'forceTimeout' in kwargs: useTimeout = kwargs.pop('forceTimeout') else: useTimeout = timeoutFunction(*args, **kwargs) return func_timeout(useTimeout, func, args=args, kwargs=kwargs) return wraps(func)(_function_wrapper) return _function_decorator # Cannot override, and calculate timeout function def _function_decorator(func): def _function_wrapper(*args, **kwargs): useTimeout = timeoutFunction(*args, **kwargs) return func_timeout(useTimeout, func, args=args, kwargs=kwargs) return wraps(func)(_function_wrapper) return _function_decorator
func_set_timeout - Decorator to run a function with a given/calculated timeout (max execution time). Optionally (if #allowOverride is True), adds a parameter, "forceTimeout", to the function which, if provided, will override the default timeout for that invocation. If #timeout is provided as a lambda/function, it will be called prior to each invocation of the decorated function to calculate the timeout to be used for that call, based on the arguments passed to the decorated function. For example, you may have a "processData" function whose execution time depends on the number of "data" elements, so you may want a million elements to have a much higher timeout than seven elements. If #allowOverride is True AND a kwarg of "forceTimeout" is passed to the wrapped function, that timeout will be used for that single call. @param timeout <float OR lambda/function> - **If float:** Default number of seconds max to allow function to execute before throwing FunctionTimedOut **If lambda/function: If a function/lambda is provided, it will be called for every invocation of the decorated function (unless #allowOverride=True and "forceTimeout" was passed) to determine the timeout to use based on the arguments to the decorated function. The arguments as passed into the decorated function will be passed to this function. They either must match exactly to what the decorated function has, OR if you prefer to get the *args (list of ordered args) and **kwargs ( key : value keyword args form), define your calculate function like: def calculateTimeout(*args, **kwargs): ... or lambda like: calculateTimeout = lambda *args, **kwargs : ... otherwise the args to your calculate function should match exactly the decorated function. @param allowOverride <bool> Default False, if True adds a keyword argument to the decorated function, "forceTimeout" which, if provided, will override the #timeout. If #timeout was provided as a lambda / function, it will not be called. @throws FunctionTimedOut If time allotted passes without function returning naturally @see func_timeout
https://github.com/kata198/func_timeout/blob/b427da2517266b31aa0d17c46e9cbeb5add8ef73/func_timeout/dafunc.py#L114-L233
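A decorator sketch exercising both documented behaviors, the fixed default timeout and the per-call forceTimeout override enabled by allowOverride=True:

import time
from func_timeout import func_set_timeout

@func_set_timeout(2, allowOverride=True)
def fetch(seconds):
    time.sleep(seconds)
    return seconds

fetch(1)                  # finishes inside the 2-second default
fetch(1, forceTimeout=5)  # overrides the default for this call only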
gplepage/vegas
examples/path-integral.py
analyze_theory
def analyze_theory(V, x0list=[], plot=False): """ Extract ground-state energy E0 and psi**2 for potential V. """ # initialize path integral T = 4. ndT = 8. # use larger ndT to reduce discretization error (goes like 1/ndT**2) neval = 3e5 # should probably use more evaluations (10x?) nitn = 6 alpha = 0.1 # damp adaptation # create integrator and train it (no x0list) integrand = PathIntegrand(V=V, T=T, ndT=ndT) integ = vegas.Integrator(integrand.region, alpha=alpha) integ(integrand, neval=neval, nitn=nitn / 2, alpha=2 * alpha) # evaluate path integral with trained integrator and x0list integrand = PathIntegrand(V=V, x0list=x0list, T=T, ndT=ndT) results = integ(integrand, neval=neval, nitn=nitn, alpha=alpha) print(results.summary()) E0 = -np.log(results['exp(-E0*T)']) / T print('Ground-state energy = %s Q = %.2f\n' % (E0, results.Q)) if len(x0list) <= 0: return E0 psi2 = results['exp(-E0*T) * psi(x0)**2'] / results['exp(-E0*T)'] print('%5s %-12s %-10s' % ('x', 'psi**2', 'sho-exact')) print(27 * '-') for i, (x0i, psi2i) in enumerate(zip(x0list, psi2)): exact = np.exp(- x0i ** 2) / np.sqrt(np.pi) #* np.exp(-T / 2.) print( "%5.1f %-12s %-10.5f" % (x0i, psi2i, exact) ) if plot: plot_results(E0, x0list, psi2, T) return E0
python
def analyze_theory(V, x0list=[], plot=False): """ Extract ground-state energy E0 and psi**2 for potential V. """ # initialize path integral T = 4. ndT = 8. # use larger ndT to reduce discretization error (goes like 1/ndT**2) neval = 3e5 # should probably use more evaluations (10x?) nitn = 6 alpha = 0.1 # damp adaptation # create integrator and train it (no x0list) integrand = PathIntegrand(V=V, T=T, ndT=ndT) integ = vegas.Integrator(integrand.region, alpha=alpha) integ(integrand, neval=neval, nitn=nitn / 2, alpha=2 * alpha) # evaluate path integral with trained integrator and x0list integrand = PathIntegrand(V=V, x0list=x0list, T=T, ndT=ndT) results = integ(integrand, neval=neval, nitn=nitn, alpha=alpha) print(results.summary()) E0 = -np.log(results['exp(-E0*T)']) / T print('Ground-state energy = %s Q = %.2f\n' % (E0, results.Q)) if len(x0list) <= 0: return E0 psi2 = results['exp(-E0*T) * psi(x0)**2'] / results['exp(-E0*T)'] print('%5s %-12s %-10s' % ('x', 'psi**2', 'sho-exact')) print(27 * '-') for i, (x0i, psi2i) in enumerate(zip(x0list, psi2)): exact = np.exp(- x0i ** 2) / np.sqrt(np.pi) #* np.exp(-T / 2.) print( "%5.1f %-12s %-10.5f" % (x0i, psi2i, exact) ) if plot: plot_results(E0, x0list, psi2, T) return E0
Extract ground-state energy E0 and psi**2 for potential V.
https://github.com/gplepage/vegas/blob/537aaa35938d521bbf7479b2be69170b9282f544/examples/path-integral.py#L82-L116
kata198/func_timeout
func_timeout/StoppableThread.py
StoppableThread._stopThread
def _stopThread(self, exception, raiseEvery=2.0): ''' _stopThread - @see StoppableThread.stop ''' if self.isAlive() is False: return True self._stderr = open(os.devnull, 'w') # Create "joining" thread which will raise the provided exception # on a repeat, until the thread stops. joinThread = JoinThread(self, exception, repeatEvery=raiseEvery) # Try to prevent spurious prints joinThread._stderr = self._stderr joinThread.start() joinThread._stderr = self._stderr
python
def _stopThread(self, exception, raiseEvery=2.0): ''' _stopThread - @see StoppableThread.stop ''' if self.isAlive() is False: return True self._stderr = open(os.devnull, 'w') # Create "joining" thread which will raise the provided exception # on a repeat, until the thread stops. joinThread = JoinThread(self, exception, repeatEvery=raiseEvery) # Try to prevent spurious prints joinThread._stderr = self._stderr joinThread.start() joinThread._stderr = self._stderr
_stopThread - @see StoppableThread.stop
https://github.com/kata198/func_timeout/blob/b427da2517266b31aa0d17c46e9cbeb5add8ef73/func_timeout/StoppableThread.py#L37-L53
kata198/func_timeout
func_timeout/StoppableThread.py
JoinThread.run
def run(self): ''' run - The thread main. Will attempt to stop and join the attached thread. ''' # Try to silence default exception printing. self.otherThread._Thread__stderr = self._stderr if hasattr(self.otherThread, '_Thread__stop'): # If py2, call this first to start thread termination cleanly. # Python3 does not need such ( nor does it provide.. ) self.otherThread._Thread__stop() while self.otherThread.isAlive(): # We loop raising the exception in case it's caught; hopefully this breaks us far out. ctypes.pythonapi.PyThreadState_SetAsyncExc(ctypes.c_long(self.otherThread.ident), ctypes.py_object(self.exception)) self.otherThread.join(self.repeatEvery) try: self._stderr.close() except: pass
python
def run(self): ''' run - The thread main. Will attempt to stop and join the attached thread. ''' # Try to silence default exception printing. self.otherThread._Thread__stderr = self._stderr if hasattr(self.otherThread, '_Thread__stop'): # If py2, call this first to start thread termination cleanly. # Python3 does not need such ( nor does it provide.. ) self.otherThread._Thread__stop() while self.otherThread.isAlive(): # We loop raising the exception in case it's caught; hopefully this breaks us far out. ctypes.pythonapi.PyThreadState_SetAsyncExc(ctypes.c_long(self.otherThread.ident), ctypes.py_object(self.exception)) self.otherThread.join(self.repeatEvery) try: self._stderr.close() except: pass
run - The thread main. Will attempt to stop and join the attached thread.
https://github.com/kata198/func_timeout/blob/b427da2517266b31aa0d17c46e9cbeb5add8ef73/func_timeout/StoppableThread.py#L94-L113
gplepage/vegas
setup.py
build_py.run
def run(self): """ Append version number to vegas/__init__.py """ with open('src/vegas/__init__.py', 'a') as vfile: vfile.write("\n__version__ = '%s'\n" % VEGAS_VERSION) _build_py.run(self)
python
def run(self): """ Append version number to vegas/__init__.py """ with open('src/vegas/__init__.py', 'a') as vfile: vfile.write("\n__version__ = '%s'\n" % VEGAS_VERSION) _build_py.run(self)
Append version number to vegas/__init__.py
https://github.com/gplepage/vegas/blob/537aaa35938d521bbf7479b2be69170b9282f544/setup.py#L43-L47
gplepage/vegas
src/vegas/__init__.py
PDFIntegrator._make_map
def _make_map(self, limit): """ Make vegas grid that is adapted to the pdf. """ ny = 2000 y = numpy.random.uniform(0., 1., (ny,1)) limit = numpy.arctan(limit) m = AdaptiveMap([[-limit, limit]], ninc=100) theta = numpy.empty(y.shape, float) jac = numpy.empty(y.shape[0], float) for itn in range(10): m.map(y, theta, jac) tan_theta = numpy.tan(theta[:, 0]) x = self.scale * tan_theta fx = (tan_theta ** 2 + 1) * numpy.exp(-(x ** 2) / 2.) m.add_training_data(y, (jac * fx) ** 2) m.adapt(alpha=1.5) return numpy.array(m.grid[0])
python
def _make_map(self, limit): """ Make vegas grid that is adapted to the pdf. """ ny = 2000 y = numpy.random.uniform(0., 1., (ny,1)) limit = numpy.arctan(limit) m = AdaptiveMap([[-limit, limit]], ninc=100) theta = numpy.empty(y.shape, float) jac = numpy.empty(y.shape[0], float) for itn in range(10): m.map(y, theta, jac) tan_theta = numpy.tan(theta[:, 0]) x = self.scale * tan_theta fx = (tan_theta ** 2 + 1) * numpy.exp(-(x ** 2) / 2.) m.add_training_data(y, (jac * fx) ** 2) m.adapt(alpha=1.5) return numpy.array(m.grid[0])
Make vegas grid that is adapted to the pdf.
https://github.com/gplepage/vegas/blob/537aaa35938d521bbf7479b2be69170b9282f544/src/vegas/__init__.py#L200-L215
gplepage/vegas
src/vegas/__init__.py
PDFIntegrator._expval
def _expval(self, f, nopdf): """ Return integrand using the tan mapping. """ def ff(theta, nopdf=nopdf): tan_theta = numpy.tan(theta) x = self.scale * tan_theta jac = self.scale * (tan_theta ** 2 + 1.) if nopdf: pdf = jac * self.pdf.pjac[None, :] else: pdf = jac * numpy.exp(-(x ** 2) / 2.) / numpy.sqrt(2 * numpy.pi) dp = self.pdf.x2dpflat(x) parg = None ans = None fparg_is_dict = False # iterate through the batch for i, (dpi, pdfi) in enumerate(zip(dp, pdf)): p = self.pdf.meanflat + dpi if parg is None: # first time only if self.pdf.shape is None: parg = _gvar.BufferDict(self.pdf.g, buf=p) else: parg = p.reshape(self.pdf.shape) else: if parg.shape is None: parg.buf = p else: parg.flat[:] = p fparg = 1. if f is None else f(parg) if ans is None: # first time only if hasattr(fparg, 'keys'): fparg_is_dict = True if not isinstance(fparg, _gvar.BufferDict): fparg = _gvar.BufferDict(fparg) ans = _gvar.BufferDict() for k in fparg: ans[k] = numpy.empty( (len(pdf),) + fparg.slice_shape(k)[1], float ) else: if numpy.shape(fparg) == (): ans = numpy.empty(len(pdf), float) else: ans = numpy.empty( (len(pdf),) + numpy.shape(fparg), float ) if fparg_is_dict: prod_pdfi = numpy.prod(pdfi) for k in ans: ans[k][i] = fparg[k] ans[k][i] *= prod_pdfi else: if not isinstance(fparg, numpy.ndarray): fparg = numpy.asarray(fparg) ans[i] = fparg * numpy.prod(pdfi) return ans return ff
python
def _expval(self, f, nopdf): """ Return integrand using the tan mapping. """ def ff(theta, nopdf=nopdf): tan_theta = numpy.tan(theta) x = self.scale * tan_theta jac = self.scale * (tan_theta ** 2 + 1.) if nopdf: pdf = jac * self.pdf.pjac[None, :] else: pdf = jac * numpy.exp(-(x ** 2) / 2.) / numpy.sqrt(2 * numpy.pi) dp = self.pdf.x2dpflat(x) parg = None ans = None fparg_is_dict = False # iterate through the batch for i, (dpi, pdfi) in enumerate(zip(dp, pdf)): p = self.pdf.meanflat + dpi if parg is None: # first time only if self.pdf.shape is None: parg = _gvar.BufferDict(self.pdf.g, buf=p) else: parg = p.reshape(self.pdf.shape) else: if parg.shape is None: parg.buf = p else: parg.flat[:] = p fparg = 1. if f is None else f(parg) if ans is None: # first time only if hasattr(fparg, 'keys'): fparg_is_dict = True if not isinstance(fparg, _gvar.BufferDict): fparg = _gvar.BufferDict(fparg) ans = _gvar.BufferDict() for k in fparg: ans[k] = numpy.empty( (len(pdf),) + fparg.slice_shape(k)[1], float ) else: if numpy.shape(fparg) == (): ans = numpy.empty(len(pdf), float) else: ans = numpy.empty( (len(pdf),) + numpy.shape(fparg), float ) if fparg_is_dict: prod_pdfi = numpy.prod(pdfi) for k in ans: ans[k][i] = fparg[k] ans[k][i] *= prod_pdfi else: if not isinstance(fparg, numpy.ndarray): fparg = numpy.asarray(fparg) ans[i] = fparg * numpy.prod(pdfi) return ans return ff
Return integrand using the tan mapping.
https://github.com/gplepage/vegas/blob/537aaa35938d521bbf7479b2be69170b9282f544/src/vegas/__init__.py#L249-L306
audreyr/binaryornot
binaryornot/helpers.py
get_starting_chunk
def get_starting_chunk(filename, length=1024): """ :param filename: File to open and get the first little chunk of. :param length: Number of bytes to read, default 1024. :returns: Starting chunk of bytes. """ # Ensure we open the file in binary mode try: with open(filename, 'rb') as f: chunk = f.read(length) return chunk except IOError as e: print(e)
python
def get_starting_chunk(filename, length=1024): """ :param filename: File to open and get the first little chunk of. :param length: Number of bytes to read, default 1024. :returns: Starting chunk of bytes. """ # Ensure we open the file in binary mode try: with open(filename, 'rb') as f: chunk = f.read(length) return chunk except IOError as e: print(e)
:param filename: File to open and get the first little chunk of. :param length: Number of bytes to read, default 1024. :returns: Starting chunk of bytes.
https://github.com/audreyr/binaryornot/blob/16d9702d49e45daa27f10a681129e42211ed0d92/binaryornot/helpers.py#L25-L37
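A minimal sketch; the filename is hypothetical, and per the code above an unreadable path prints the IOError and implicitly returns None:

from binaryornot.helpers import get_starting_chunk

chunk = get_starting_chunk('somefile.bin', length=512)
if chunk is not None:
    print(len(chunk))  # up to 512 bytes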
Magnetic/cycy
cycy/repl.py
REPL.dump
def dump(self, function_name): """ Pretty-dump the bytecode for the function with the given name. """ assert isinstance(function_name, str) self.stdout.write(function_name) self.stdout.write("\n") self.stdout.write("-" * len(function_name)) self.stdout.write("\n\n") byte_code = self.interpreter.compiled_functions[function_name] self.stdout.write(byte_code.dump()) self.stdout.write("\n")
python
def dump(self, function_name): """ Pretty-dump the bytecode for the function with the given name. """ assert isinstance(function_name, str) self.stdout.write(function_name) self.stdout.write("\n") self.stdout.write("-" * len(function_name)) self.stdout.write("\n\n") byte_code = self.interpreter.compiled_functions[function_name] self.stdout.write(byte_code.dump()) self.stdout.write("\n")
Pretty-dump the bytecode for the function with the given name.
https://github.com/Magnetic/cycy/blob/494282a37b5f7d1eaa17b8d01796df8302da2a81/cycy/repl.py#L76-L90
jmfederico/django-use-email-as-username
django_use_email_as_username/management/commands/create_custom_user_app.py
Command.handle
def handle(self, **options): """Call "startapp" to generate app with custom user model.""" template = os.path.dirname(os.path.abspath(__file__)) + "/app_template" name = options.pop("name") call_command("startapp", name, template=template, **options)
python
def handle(self, **options): """Call "startapp" to generate app with custom user model.""" template = os.path.dirname(os.path.abspath(__file__)) + "/app_template" name = options.pop("name") call_command("startapp", name, template=template, **options)
Call "startapp" to generate app with custom user model.
https://github.com/jmfederico/django-use-email-as-username/blob/401e404b822f7ba5b3ef34b06ce095e564f32912/django_use_email_as_username/management/commands/create_custom_user_app.py#L26-L30
jmfederico/django-use-email-as-username
django_use_email_as_username/models.py
BaseUserManager._create_user
def _create_user(self, email, password, **extra_fields): """Create and save a User with the given email and password.""" if not email: raise ValueError("The given email must be set") email = self.normalize_email(email) user = self.model(email=email, **extra_fields) user.set_password(password) user.save(using=self._db) return user
python
def _create_user(self, email, password, **extra_fields): """Create and save a User with the given email and password.""" if not email: raise ValueError("The given email must be set") email = self.normalize_email(email) user = self.model(email=email, **extra_fields) user.set_password(password) user.save(using=self._db) return user
Create and save a User with the given email and password.
https://github.com/jmfederico/django-use-email-as-username/blob/401e404b822f7ba5b3ef34b06ce095e564f32912/django_use_email_as_username/models.py#L14-L22
Mapkin/osmgraph
osmgraph/tools.py
nwise
def nwise(iterable, n): """ Iterate through a sequence with a defined length window >>> list(nwise(range(8), 3)) [(0, 1, 2), (1, 2, 3), (2, 3, 4), (3, 4, 5), (4, 5, 6), (5, 6, 7)] >>> list(nwise(range(3), 5)) [] Parameters ---------- iterable n : length of each sequence Yields ------ Tuples of length n """ iters = itertools.tee(iterable, n) iters = (itertools.islice(it, i, None) for i, it in enumerate(iters)) return itertools.izip(*iters)
python
def nwise(iterable, n): """ Iterate through a sequence with a defined length window >>> list(nwise(range(8), 3)) [(0, 1, 2), (1, 2, 3), (2, 3, 4), (3, 4, 5), (4, 5, 6), (5, 6, 7)] >>> list(nwise(range(3), 5)) [] Parameters ---------- iterable n : length of each sequence Yields ------ Tuples of length n """ iters = itertools.tee(iterable, n) iters = (itertools.islice(it, i, None) for i, it in enumerate(iters)) return itertools.izip(*iters)
Iterate through a sequence with a defined length window >>> list(nwise(range(8), 3)) [(0, 1, 2), (1, 2, 3), (2, 3, 4), (3, 4, 5), (4, 5, 6), (5, 6, 7)] >>> list(nwise(range(3), 5)) [] Parameters ---------- iterable n : length of each sequence Yields ------ Tuples of length n
https://github.com/Mapkin/osmgraph/blob/4f8e6466c11edbe30f1bbefc939e5613860a43b4/osmgraph/tools.py#L6-L29
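The record above targets Python 2 (itertools.izip); a functionally equivalent Python 3 sketch of the same windowing idea:

import itertools

def nwise(iterable, n):
    # n shifted copies of the iterator, zipped together -> sliding windows of length n
    iters = itertools.tee(iterable, n)
    iters = (itertools.islice(it, i, None) for i, it in enumerate(iters))
    return zip(*iters)

print(list(nwise(range(8), 3)))
# [(0, 1, 2), (1, 2, 3), (2, 3, 4), (3, 4, 5), (4, 5, 6), (5, 6, 7)]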
Mapkin/osmgraph
osmgraph/tools.py
coordinates
def coordinates(g, nodes): """ Extract (lon, lat) coordinate pairs from nodes in an osmgraph osmgraph nodes have a 'coordinate' property on each node. This is a shortcut for extracting a coordinate list from an iterable of nodes >>> g = osmgraph.parse_file(filename) >>> node_ids = g.nodes()[:3] # Grab 3 nodes [61341696, 61341697, 61341698] >>> coords = coordinates(g, node_ids) [(-71.0684107, 42.3516822), (-71.133251, 42.350308), (-71.170641, 42.352689)] Parameters ---------- g : networkx graph created with osmgraph nodes : iterable of node ids Returns ------- List of (lon, lat) coordinate pairs """ c = [g.node[n]['coordinate'] for n in nodes] return c
python
def coordinates(g, nodes): """ Extract (lon, lat) coordinate pairs from nodes in an osmgraph osmgraph nodes have a 'coordinate' property on each node. This is a shortcut for extracting a coordinate list from an iterable of nodes >>> g = osmgraph.parse_file(filename) >>> node_ids = g.nodes()[:3] # Grab 3 nodes [61341696, 61341697, 61341698] >>> coords = coordinates(g, node_ids) [(-71.0684107, 42.3516822), (-71.133251, 42.350308), (-71.170641, 42.352689)] Parameters ---------- g : networkx graph created with osmgraph nodes : iterable of node ids Returns ------- List of (lon, lat) coordinate pairs """ c = [g.node[n]['coordinate'] for n in nodes] return c
Extract (lon, lat) coordinate pairs from nodes in an osmgraph osmgraph nodes have a 'coordinate' property on each node. This is a shortcut for extracting a coordinate list from an iterable of nodes >>> g = osmgraph.parse_file(filename) >>> node_ids = g.nodes()[:3] # Grab 3 nodes [61341696, 61341697, 61341698] >>> coords = coordinates(g, node_ids) [(-71.0684107, 42.3516822), (-71.133251, 42.350308), (-71.170641, 42.352689)] Parameters ---------- g : networkx graph created with osmgraph nodes : iterable of node ids Returns ------- List of (lon, lat) coordinate pairs
https://github.com/Mapkin/osmgraph/blob/4f8e6466c11edbe30f1bbefc939e5613860a43b4/osmgraph/tools.py#L45-L72
Mapkin/osmgraph
osmgraph/tools.py
step
def step(g, n1, n2, inbound=False, backward=False, continue_fn=None): """ Step along a path through a directed graph unless there is an intersection Example graph: Note that edges (1, 2) and (2, 3) are bidirectional, i.e., (2, 1) and (3, 2) are also edges 1 -- 2 -- 3 -->-- 5 -->-- 7 | | ^ v | | 4 6 >>> step(g, 1, 2) 3 >>> step(g, 3, 5) None >>> step(g, 2, 3) 5 >>> step(g, 2, 3, inbound=True) None >>> step(g, 7, 5, backward=True) 3 >>> def f(g, n1, n2, backward): if n2 == 5: return 7 return None >>> step(g, 3, 5, continue_fn=f) 7 Parameters ---------- g : networkx DiGraph n1 : node id in g n2 : node id in g (n1, n2) must be an edge in g inbound : bool (default False) whether incoming edges should be considered backward : bool (default False) whether edges are in reverse order (i.e., point from n2 to n1) continue_fn : callable (optional) if at an intersection, continue_fn is called to indicate how to proceed continue_fn takes the form: f(g, n1, n2, backward) where all arguments are as passed into step. f should return a node id such that f(g, n1, n2, backward) is a successor of n2. f should return None if no way forward. Returns ------- The next node in the path from n1 to n2. Returns None if there are no edges from n2 or multiple edges from n2 """ forw = g.successors back = g.predecessors if backward: back, forw = forw, back nodes = forw(n2) if inbound: nodes = set(nodes + back(n2)) candidates = [n for n in nodes if n != n1] if len(candidates) == 1: result = candidates[0] elif continue_fn: result = continue_fn(g, n1, n2, backward) else: result = None return result
python
def step(g, n1, n2, inbound=False, backward=False, continue_fn=None): """ Step along a path through a directed graph unless there is an intersection Example graph: Note that edges (1, 2) and (2, 3) are bidirectional, i.e., (2, 1) and (3, 2) are also edges 1 -- 2 -- 3 -->-- 5 -->-- 7 | | ^ v | | 4 6 >>> step(g, 1, 2) 3 >>> step(g, 3, 5) None >>> step(g, 2, 3) 5 >>> step(g, 2, 3, inbound=True) None >>> step(g, 7, 5, backward=True) 3 >>> def f(g, n1, n2, backward): if n2 == 5: return 7 return None >>> step(g, 3, 5, continue_fn=f) 7 Parameters ---------- g : networkx DiGraph n1 : node id in g n2 : node id in g (n1, n2) must be an edge in g inbound : bool (default False) whether incoming edges should be considered backward : bool (default False) whether edges are in reverse order (i.e., point from n2 to n1) continue_fn : callable (optional) if at an intersection, continue_fn is called to indicate how to proceed continue_fn takes the form: f(g, n1, n2, backward) where all arguments are as passed into step. f should return a node id such that f(g, n1, n2, backward) is a successor of n2. f should return None if no way forward. Returns ------- The next node in the path from n1 to n2. Returns None if there are no edges from n2 or multiple edges from n2 """ forw = g.successors back = g.predecessors if backward: back, forw = forw, back nodes = forw(n2) if inbound: nodes = set(nodes + back(n2)) candidates = [n for n in nodes if n != n1] if len(candidates) == 1: result = candidates[0] elif continue_fn: result = continue_fn(g, n1, n2, backward) else: result = None return result
Step along a path through a directed graph unless there is an intersection Example graph: Note that edges (1, 2) and (2, 3) are bidirectional, i.e., (2, 1) and (3, 2) are also edges 1 -- 2 -- 3 -->-- 5 -->-- 7 | | ^ v | | 4 6 >>> step(g, 1, 2) 3 >>> step(g, 3, 5) None >>> step(g, 2, 3) 5 >>> step(g, 2, 3, inbound=True) None >>> step(g, 7, 5, backward=True) 3 >>> def f(g, n1, n2, backward): if n2 == 5: return 7 return None >>> step(g, 3, 5, continue_fn=f) 7 Parameters ---------- g : networkx DiGraph n1 : node id in g n2 : node id in g (n1, n2) must be an edge in g inbound : bool (default False) whether incoming edges should be considered backward : bool (default False) whether edges are in reverse order (i.e., point from n2 to n1) continue_fn : callable (optional) if at an intersection, continue_fn is called to indicate how to proceed continue_fn takes the form: f(g, n1, n2, backward) where all arguments are as passed into step. f should return a node id such that f(g, n1, n2, backward) is a successor of n2. f should return None if no way forward. Returns ------- The next node in the path from n1 to n2. Returns None if there are no edges from n2 or multiple edges from n2
https://github.com/Mapkin/osmgraph/blob/4f8e6466c11edbe30f1bbefc939e5613860a43b4/osmgraph/tools.py#L75-L151
Mapkin/osmgraph
osmgraph/tools.py
move
def move(g, n1, n2, **kwargs): """ Step along a graph until it ends or reaches an intersection Example graph: Note that edges (1, 2) and (2, 3) are bidirectional, i.e., (2, 1) and (3, 2) are also edges 1 -- 2 -- 3 -->-- 5 -->-- 7 | | ^ v | | 4 6 >>> list(move(g, 1, 2)) [1, 2, 3, 5] # Stops at 5 because you can get to both 6 and 7 from 5 >>> list(move(g, 1, 2, inbound=True)) [1, 2, 3] Parameters ---------- Same as step() Yields ------ Node IDs until either there is no path forward or the path reaches an intersection """ prev = n1 curr = n2 _next = step(g, prev, curr, **kwargs) yield prev yield curr visited_nodes = set([prev, curr]) while _next: yield _next if _next in visited_nodes: return visited_nodes.add(_next) prev = curr curr = _next _next = step(g, prev, curr, **kwargs)
python
def move(g, n1, n2, **kwargs): """ Step along a graph until it ends or reaches an intersection Example graph: Note that edges (1, 2) and (2, 3) are bidirectional, i.e., (2, 1) and (3, 2) are also edges 1 -- 2 -- 3 -->-- 5 -->-- 7 | | ^ v | | 4 6 >>> list(move(g, 1, 2)) [1, 2, 3, 5] # Stops at 5 because you can get to both 6 and 7 from 5 >>> list(move(g, 1, 2, inbound=True)) [1, 2, 3] Parameters ---------- Same as step() Yields ------ Node IDs until either there is no path forward or the path reaches an intersection """ prev = n1 curr = n2 _next = step(g, prev, curr, **kwargs) yield prev yield curr visited_nodes = set([prev, curr]) while _next: yield _next if _next in visited_nodes: return visited_nodes.add(_next) prev = curr curr = _next _next = step(g, prev, curr, **kwargs)
Step along a graph until it ends or reaches an intersection Example graph: Note that edges (1, 2) and (2, 3) are bidirectional, i.e., (2, 1) and (3, 2) are also edges 1 -- 2 -- 3 -->-- 5 -->-- 7 | | ^ v | | 4 6 >>> list(move(g, 1, 2)) [1, 2, 3, 5] # Stops at 5 because you can get to both 6 and 7 from 5 >>> list(move(g, 1, 2, inbound=True)) [1, 2, 3] Parameters ---------- Same as step() Yields ------ Node IDs until either there is no path forward or the path reaches an intersection
https://github.com/Mapkin/osmgraph/blob/4f8e6466c11edbe30f1bbefc939e5613860a43b4/osmgraph/tools.py#L154-L200
Mapkin/osmgraph
osmgraph/tools.py
is_intersection
def is_intersection(g, n): """ Determine if a node is an intersection graph: 1 -->-- 2 -->-- 3 >>> is_intersection(g, 2) False graph: 1 -- 2 -- 3 | 4 >>> is_intersection(g, 2) True Parameters ---------- g : networkx DiGraph n : node id Returns ------- bool """ return len(set(g.predecessors(n) + g.successors(n))) > 2
python
def is_intersection(g, n): """ Determine if a node is an intersection graph: 1 -->-- 2 -->-- 3 >>> is_intersection(g, 2) False graph: 1 -- 2 -- 3 | 4 >>> is_intersection(g, 2) True Parameters ---------- g : networkx DiGraph n : node id Returns ------- bool """ return len(set(g.predecessors(n) + g.successors(n))) > 2
Determine if a node is an intersection graph: 1 -->-- 2 -->-- 3 >>> is_intersection(g, 2) False graph: 1 -- 2 -- 3 | 4 >>> is_intersection(g, 2) True Parameters ---------- g : networkx DiGraph n : node id Returns ------- bool
https://github.com/Mapkin/osmgraph/blob/4f8e6466c11edbe30f1bbefc939e5613860a43b4/osmgraph/tools.py#L203-L230
lamenezes/simple-model
simple_model/models.py
LazyModel.as_dict
def as_dict(self): """ Returns the model as a dict """ if not self._is_valid: self.validate() from .converters import to_dict return to_dict(self)
python
def as_dict(self): """ Returns the model as a dict """ if not self._is_valid: self.validate() from .converters import to_dict return to_dict(self)
Returns the model as a dict
https://github.com/lamenezes/simple-model/blob/05138edd022db642ef5611ac660832993e2af0d4/simple_model/models.py#L172-L180
Mapkin/osmgraph
osmgraph/importer.py
GraphImporter.coords_callback
def coords_callback(self, data): """ Callback for nodes that have no tags """ for node_id, lon, lat in data: self.coords[node_id] = (lon, lat)
python
def coords_callback(self, data): """ Callback for nodes that have no tags """ for node_id, lon, lat in data: self.coords[node_id] = (lon, lat)
Callback for nodes that have no tags
https://github.com/Mapkin/osmgraph/blob/4f8e6466c11edbe30f1bbefc939e5613860a43b4/osmgraph/importer.py#L14-L17
Mapkin/osmgraph
osmgraph/importer.py
GraphImporter.nodes_callback
def nodes_callback(self, data): """ Callback for nodes with tags """ for node_id, tags, coords in data: # Discard the coords because they go into add_coords self.nodes[node_id] = tags
python
def nodes_callback(self, data): """ Callback for nodes with tags """ for node_id, tags, coords in data: # Discard the coords because they go into add_coords self.nodes[node_id] = tags
Callback for nodes with tags
https://github.com/Mapkin/osmgraph/blob/4f8e6466c11edbe30f1bbefc939e5613860a43b4/osmgraph/importer.py#L19-L23
Mapkin/osmgraph
osmgraph/importer.py
GraphImporter.ways_callback
def ways_callback(self, data): """ Callback for all ways """ for way_id, tags, nodes in data: # Imposm passes all ways through regardless of whether the tags # have been filtered or not. It needs to do this in order to # handle relations, but we don't care about relations at the # moment. if tags: self.ways[way_id] = (tags, nodes)
python
def ways_callback(self, data): """ Callback for all ways """ for way_id, tags, nodes in data: # Imposm passes all ways through regardless of whether the tags # have been filtered or not. It needs to do this in order to # handle relations, but we don't care about relations at the # moment. if tags: self.ways[way_id] = (tags, nodes)
Callback for all ways
https://github.com/Mapkin/osmgraph/blob/4f8e6466c11edbe30f1bbefc939e5613860a43b4/osmgraph/importer.py#L25-L33
Mapkin/osmgraph
osmgraph/importer.py
GraphImporter.get_graph
def get_graph(self, parse_direction=False): """ Return the networkx directed graph of received data """ g = nx.DiGraph() for way_id, (tags, nodes) in self.ways.items(): # If oneway is '-1', reverse the way and treat as a normal oneway if tags.get('oneway') == '-1': nodes = reversed(nodes) tags['oneway'] = 'yes' oneway = tags.get('oneway') == 'yes' for n0, n1 in tools.pairwise(nodes): g.add_edge(n0, n1, attr_dict=tags) if parse_direction: g[n0][n1]['_direction'] = 'forward' if not oneway: g.add_edge(n1, n0, attr_dict=tags) if parse_direction: g[n1][n0]['_direction'] = 'backward' g.node[n0].update(self._node_properties(n0)) g.node[n1].update(self._node_properties(n1)) return g
python
def get_graph(self, parse_direction=False): """ Return the networkx directed graph of received data """ g = nx.DiGraph() for way_id, (tags, nodes) in self.ways.items(): # If oneway is '-1', reverse the way and treat as a normal oneway if tags.get('oneway') == '-1': nodes = reversed(nodes) tags['oneway'] = 'yes' oneway = tags.get('oneway') == 'yes' for n0, n1 in tools.pairwise(nodes): g.add_edge(n0, n1, attr_dict=tags) if parse_direction: g[n0][n1]['_direction'] = 'forward' if not oneway: g.add_edge(n1, n0, attr_dict=tags) if parse_direction: g[n1][n0]['_direction'] = 'backward' g.node[n0].update(self._node_properties(n0)) g.node[n1].update(self._node_properties(n1)) return g
Return the networkx directed graph of received data
https://github.com/Mapkin/osmgraph/blob/4f8e6466c11edbe30f1bbefc939e5613860a43b4/osmgraph/importer.py#L35-L58
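get_graph consumes each way as consecutive node pairs through tools.pairwise. The tools module is not shown here; a sketch under the assumption that it follows the standard itertools recipe:

import itertools

def pairwise(iterable):
    # s -> (s0, s1), (s1, s2), (s2, s3), ... (classic itertools recipe).
    a, b = itertools.tee(iterable)
    next(b, None)
    return zip(a, b)

print(list(pairwise([1, 2, 3, 4])))  # [(1, 2), (2, 3), (3, 4)]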
Mapkin/osmgraph
osmgraph/main.py
parse_file
def parse_file(filename, parse_direction=False, **kwargs):
    """
    Return an OSM networkx graph from the input OSM file

    Only works with OSM xml, xml.bz2 and pbf files. This function cannot
    take OSM QA tile files. Use parse_qa_tile() for QA tiles.

    >>> graph = parse_file(filename)

    """
    importer, parser = make_importer_parser(OSMParser, **kwargs)
    parser.parse(filename)

    return importer.get_graph(parse_direction=parse_direction)
python
def parse_file(filename, parse_direction=False, **kwargs):
    """
    Return an OSM networkx graph from the input OSM file

    Only works with OSM xml, xml.bz2 and pbf files. This function cannot
    take OSM QA tile files. Use parse_qa_tile() for QA tiles.

    >>> graph = parse_file(filename)

    """
    importer, parser = make_importer_parser(OSMParser, **kwargs)
    parser.parse(filename)

    return importer.get_graph(parse_direction=parse_direction)
Return an OSM networkx graph from the input OSM file

Only works with OSM xml, xml.bz2 and pbf files. This function cannot
take OSM QA tile files. Use parse_qa_tile() for QA tiles.

>>> graph = parse_file(filename)
https://github.com/Mapkin/osmgraph/blob/4f8e6466c11edbe30f1bbefc939e5613860a43b4/osmgraph/main.py#L10-L23
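A minimal usage sketch for parse_file, assuming the package is installed and an extract exists on disk (the file name is illustrative):

import osmgraph

g = osmgraph.parse_file('extract.osm.pbf')
print(g.number_of_nodes(), g.number_of_edges())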
Mapkin/osmgraph
osmgraph/main.py
parse_data
def parse_data(data, type, **kwargs):
    """
    Return an OSM networkx graph from the input OSM data

    Parameters
    ----------
    data : string
    type : string ('xml' or 'pbf')

    >>> graph = parse_data(data, 'xml')

    """
    suffixes = {
        'xml': '.osm',
        'pbf': '.pbf',
    }
    try:
        suffix = suffixes[type]
    except KeyError:
        raise ValueError('Unknown data type "%s"' % type)

    fd, filename = tempfile.mkstemp(suffix=suffix)
    try:
        os.write(fd, data)
        os.close(fd)
        return parse_file(filename, **kwargs)
    finally:
        os.remove(filename)
python
def parse_data(data, type, **kwargs):
    """
    Return an OSM networkx graph from the input OSM data

    Parameters
    ----------
    data : string
    type : string ('xml' or 'pbf')

    >>> graph = parse_data(data, 'xml')

    """
    suffixes = {
        'xml': '.osm',
        'pbf': '.pbf',
    }
    try:
        suffix = suffixes[type]
    except KeyError:
        raise ValueError('Unknown data type "%s"' % type)

    fd, filename = tempfile.mkstemp(suffix=suffix)
    try:
        os.write(fd, data)
        os.close(fd)
        return parse_file(filename, **kwargs)
    finally:
        os.remove(filename)
Return an OSM networkx graph from the input OSM data

Parameters
----------
data : string
type : string ('xml' or 'pbf')

>>> graph = parse_data(data, 'xml')
https://github.com/Mapkin/osmgraph/blob/4f8e6466c11edbe30f1bbefc939e5613860a43b4/osmgraph/main.py#L26-L53
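Because parse_data writes the payload with os.write(), the data argument must be bytes on Python 3, even though the docstring says string. A usage sketch under that assumption:

import osmgraph

with open('extract.osm', 'rb') as f:  # read as bytes; os.write() needs bytes
    data = f.read()
g = osmgraph.parse_data(data, 'xml')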
Mapkin/osmgraph
osmgraph/main.py
parse_qa_tile
def parse_qa_tile(x, y, zoom, data, parse_direction=False, **kwargs):
    """
    Return an OSM networkx graph from the input OSM QA tile data

    Parameters
    ----------
    data : string
    x : int
        tile's x coordinate
    y : int
        tile's y coordinate
    zoom : int
        tile's zoom level

    >>> graph = parse_qa_tile(1239, 1514, 12, data)

    """
    import osmqa
    importer, parser = make_importer_parser(osmqa.QATileParser, **kwargs)
    parser.parse_data(x, y, zoom, data)
    return importer.get_graph(parse_direction=parse_direction)
python
def parse_qa_tile(x, y, zoom, data, parse_direction=False, **kwargs):
    """
    Return an OSM networkx graph from the input OSM QA tile data

    Parameters
    ----------
    data : string
    x : int
        tile's x coordinate
    y : int
        tile's y coordinate
    zoom : int
        tile's zoom level

    >>> graph = parse_qa_tile(1239, 1514, 12, data)

    """
    import osmqa
    importer, parser = make_importer_parser(osmqa.QATileParser, **kwargs)
    parser.parse_data(x, y, zoom, data)
    return importer.get_graph(parse_direction=parse_direction)
Return an OSM networkx graph from the input OSM QA tile data

Parameters
----------
data : string
x : int
    tile's x coordinate
y : int
    tile's y coordinate
zoom : int
    tile's zoom level

>>> graph = parse_qa_tile(1239, 1514, 12, data)
https://github.com/Mapkin/osmgraph/blob/4f8e6466c11edbe30f1bbefc939e5613860a43b4/osmgraph/main.py#L56-L76
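The doctest above was aligned with the signature: the tile coordinates come first, then the payload. A usage sketch, with tile_data assumed to hold the raw QA-tile bytes:

import osmgraph

g = osmgraph.parse_qa_tile(1239, 1514, 12, tile_data)  # x, y, zoom, data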
pyoceans/python-ctd
ctd/read.py
_basename
def _basename(fname):
    """Return file name without path."""
    if not isinstance(fname, Path):
        fname = Path(fname)
    path, name, ext = fname.parent, fname.stem, fname.suffix
    return path, name, ext
python
def _basename(fname):
    """Return file name without path."""
    if not isinstance(fname, Path):
        fname = Path(fname)
    path, name, ext = fname.parent, fname.stem, fname.suffix
    return path, name, ext
Return file name without path.
https://github.com/pyoceans/python-ctd/blob/fa9a9d02da3dfed6d1d60db0e52bbab52adfe666/ctd/read.py#L15-L20
pyoceans/python-ctd
ctd/read.py
from_bl
def from_bl(fname):
    """Read Seabird bottle-trip (bl) file

    Example
    -------
    >>> from pathlib import Path
    >>> import ctd
    >>> data_path = Path(__file__).parents[1].joinpath("tests", "data")
    >>> df = ctd.from_bl(str(data_path.joinpath('bl', 'bottletest.bl')))
    >>> df._metadata["time_of_reset"]
    datetime.datetime(2018, 6, 25, 20, 8, 55)

    """
    df = pd.read_csv(
        fname,
        skiprows=2,
        parse_dates=[1],
        index_col=0,
        names=["bottle_number", "time", "startscan", "endscan"],
    )
    df._metadata = {
        "time_of_reset": pd.to_datetime(
            linecache.getline(str(fname), 2)[6:-1]
        ).to_pydatetime()
    }
    return df
python
def from_bl(fname):
    """Read Seabird bottle-trip (bl) file

    Example
    -------
    >>> from pathlib import Path
    >>> import ctd
    >>> data_path = Path(__file__).parents[1].joinpath("tests", "data")
    >>> df = ctd.from_bl(str(data_path.joinpath('bl', 'bottletest.bl')))
    >>> df._metadata["time_of_reset"]
    datetime.datetime(2018, 6, 25, 20, 8, 55)

    """
    df = pd.read_csv(
        fname,
        skiprows=2,
        parse_dates=[1],
        index_col=0,
        names=["bottle_number", "time", "startscan", "endscan"],
    )
    df._metadata = {
        "time_of_reset": pd.to_datetime(
            linecache.getline(str(fname), 2)[6:-1]
        ).to_pydatetime()
    }
    return df
Read Seabird bottle-trip (bl) file

Example
-------
>>> from pathlib import Path
>>> import ctd
>>> data_path = Path(__file__).parents[1].joinpath("tests", "data")
>>> df = ctd.from_bl(str(data_path.joinpath('bl', 'bottletest.bl')))
>>> df._metadata["time_of_reset"]
datetime.datetime(2018, 6, 25, 20, 8, 55)
https://github.com/pyoceans/python-ctd/blob/fa9a9d02da3dfed6d1d60db0e52bbab52adfe666/ctd/read.py#L157-L182
pyoceans/python-ctd
ctd/read.py
from_btl
def from_btl(fname):
    """
    DataFrame constructor to open Seabird CTD BTL-ASCII format.

    Examples
    --------
    >>> from pathlib import Path
    >>> import ctd
    >>> data_path = Path(__file__).parents[1].joinpath("tests", "data")
    >>> bottles = ctd.from_btl(data_path.joinpath('btl', 'bottletest.btl'))

    """
    f = _read_file(fname)
    metadata = _parse_seabird(f.readlines(), ftype="btl")
    f.seek(0)

    df = pd.read_fwf(
        f,
        header=None,
        index_col=False,
        names=metadata["names"],
        parse_dates=False,
        skiprows=metadata["skiprows"],
    )
    f.close()

    # At this point the data frame is not correctly lined up (multiple rows
    # for avg, std, min, max or just avg, std, etc).
    # Also needs date,time,and bottle number to be converted to one per line.

    # Get row types, see what you have: avg, std, min, max or just avg, std.
    rowtypes = df[df.columns[-1]].unique()
    # Get times and dates which occur on second line of each bottle.
    dates = df.iloc[:: len(rowtypes), 1].reset_index(drop=True)
    times = df.iloc[1 :: len(rowtypes), 1].reset_index(drop=True)
    datetimes = dates + " " + times

    # Fill the Date column with datetimes.
    df.loc[:: len(rowtypes), "Date"] = datetimes.values
    df.loc[1 :: len(rowtypes), "Date"] = datetimes.values

    # Fill missing rows.
    df["Bottle"] = df["Bottle"].fillna(method="ffill")
    df["Date"] = df["Date"].fillna(method="ffill")

    df["Statistic"] = df["Statistic"].str.replace(r"\(|\)", "")  # (avg) to avg

    name = _basename(fname)[1]

    dtypes = {
        "bpos": int,
        "pumps": bool,
        "flag": bool,
        "Bottle": int,
        "Scan": int,
        "Statistic": str,
        "Date": str,
    }
    for column in df.columns:
        if column in dtypes:
            df[column] = df[column].astype(dtypes[column])
        else:
            try:
                df[column] = df[column].astype(float)
            except ValueError:
                warnings.warn("Could not convert %s to float." % column)

    df["Date"] = pd.to_datetime(df["Date"])
    metadata["name"] = str(name)
    setattr(df, "_metadata", metadata)
    return df
python
def from_btl(fname):
    """
    DataFrame constructor to open Seabird CTD BTL-ASCII format.

    Examples
    --------
    >>> from pathlib import Path
    >>> import ctd
    >>> data_path = Path(__file__).parents[1].joinpath("tests", "data")
    >>> bottles = ctd.from_btl(data_path.joinpath('btl', 'bottletest.btl'))

    """
    f = _read_file(fname)
    metadata = _parse_seabird(f.readlines(), ftype="btl")
    f.seek(0)

    df = pd.read_fwf(
        f,
        header=None,
        index_col=False,
        names=metadata["names"],
        parse_dates=False,
        skiprows=metadata["skiprows"],
    )
    f.close()

    # At this point the data frame is not correctly lined up (multiple rows
    # for avg, std, min, max or just avg, std, etc).
    # Also needs date,time,and bottle number to be converted to one per line.

    # Get row types, see what you have: avg, std, min, max or just avg, std.
    rowtypes = df[df.columns[-1]].unique()
    # Get times and dates which occur on second line of each bottle.
    dates = df.iloc[:: len(rowtypes), 1].reset_index(drop=True)
    times = df.iloc[1 :: len(rowtypes), 1].reset_index(drop=True)
    datetimes = dates + " " + times

    # Fill the Date column with datetimes.
    df.loc[:: len(rowtypes), "Date"] = datetimes.values
    df.loc[1 :: len(rowtypes), "Date"] = datetimes.values

    # Fill missing rows.
    df["Bottle"] = df["Bottle"].fillna(method="ffill")
    df["Date"] = df["Date"].fillna(method="ffill")

    df["Statistic"] = df["Statistic"].str.replace(r"\(|\)", "")  # (avg) to avg

    name = _basename(fname)[1]

    dtypes = {
        "bpos": int,
        "pumps": bool,
        "flag": bool,
        "Bottle": int,
        "Scan": int,
        "Statistic": str,
        "Date": str,
    }
    for column in df.columns:
        if column in dtypes:
            df[column] = df[column].astype(dtypes[column])
        else:
            try:
                df[column] = df[column].astype(float)
            except ValueError:
                warnings.warn("Could not convert %s to float." % column)

    df["Date"] = pd.to_datetime(df["Date"])
    metadata["name"] = str(name)
    setattr(df, "_metadata", metadata)
    return df
DataFrame constructor to open Seabird CTD BTL-ASCII format.

Examples
--------
>>> from pathlib import Path
>>> import ctd
>>> data_path = Path(__file__).parents[1].joinpath("tests", "data")
>>> bottles = ctd.from_btl(data_path.joinpath('btl', 'bottletest.btl'))
https://github.com/pyoceans/python-ctd/blob/fa9a9d02da3dfed6d1d60db0e52bbab52adfe666/ctd/read.py#L185-L256
pyoceans/python-ctd
ctd/read.py
from_edf
def from_edf(fname):
    """
    DataFrame constructor to open XBT EDF ASCII format.

    Examples
    --------
    >>> from pathlib import Path
    >>> import ctd
    >>> data_path = Path(__file__).parents[1].joinpath("tests", "data")
    >>> cast = ctd.from_edf(data_path.joinpath('XBT.EDF.gz'))
    >>> ax = cast['temperature'].plot_cast()

    """
    f = _read_file(fname)
    header, names = [], []
    for k, line in enumerate(f.readlines()):
        line = line.strip()
        if line.startswith("Serial Number"):
            serial = line.strip().split(":")[1].strip()
        elif line.startswith("Latitude"):
            try:
                hemisphere = line[-1]
                lat = line.strip(hemisphere).split(":")[1].strip()
                lat = np.float_(lat.split())
                if hemisphere == "S":
                    lat = -(lat[0] + lat[1] / 60.0)
                elif hemisphere == "N":
                    lat = lat[0] + lat[1] / 60.0
            except (IndexError, ValueError):
                lat = None
        elif line.startswith("Longitude"):
            try:
                hemisphere = line[-1]
                lon = line.strip(hemisphere).split(":")[1].strip()
                lon = np.float_(lon.split())
                if hemisphere == "W":
                    lon = -(lon[0] + lon[1] / 60.0)
                elif hemisphere == "E":
                    lon = lon[0] + lon[1] / 60.0
            except (IndexError, ValueError):
                lon = None
        else:
            header.append(line)
            if line.startswith("Field"):
                col, unit = [l.strip().lower() for l in line.split(":")]
                names.append(unit.split()[0])
        if line == "// Data":
            skiprows = k + 1
            break

    f.seek(0)
    df = pd.read_csv(
        f,
        header=None,
        index_col=None,
        names=names,
        skiprows=skiprows,
        delim_whitespace=True,
    )
    f.close()

    df.set_index("depth", drop=True, inplace=True)
    df.index.name = "Depth [m]"
    name = _basename(fname)[1]
    metadata = {
        "lon": lon,
        "lat": lat,
        "name": str(name),
        "header": "\n".join(header),
        "serial": serial,
    }
    setattr(df, "_metadata", metadata)
    return df
python
def from_edf(fname):
    """
    DataFrame constructor to open XBT EDF ASCII format.

    Examples
    --------
    >>> from pathlib import Path
    >>> import ctd
    >>> data_path = Path(__file__).parents[1].joinpath("tests", "data")
    >>> cast = ctd.from_edf(data_path.joinpath('XBT.EDF.gz'))
    >>> ax = cast['temperature'].plot_cast()

    """
    f = _read_file(fname)
    header, names = [], []
    for k, line in enumerate(f.readlines()):
        line = line.strip()
        if line.startswith("Serial Number"):
            serial = line.strip().split(":")[1].strip()
        elif line.startswith("Latitude"):
            try:
                hemisphere = line[-1]
                lat = line.strip(hemisphere).split(":")[1].strip()
                lat = np.float_(lat.split())
                if hemisphere == "S":
                    lat = -(lat[0] + lat[1] / 60.0)
                elif hemisphere == "N":
                    lat = lat[0] + lat[1] / 60.0
            except (IndexError, ValueError):
                lat = None
        elif line.startswith("Longitude"):
            try:
                hemisphere = line[-1]
                lon = line.strip(hemisphere).split(":")[1].strip()
                lon = np.float_(lon.split())
                if hemisphere == "W":
                    lon = -(lon[0] + lon[1] / 60.0)
                elif hemisphere == "E":
                    lon = lon[0] + lon[1] / 60.0
            except (IndexError, ValueError):
                lon = None
        else:
            header.append(line)
            if line.startswith("Field"):
                col, unit = [l.strip().lower() for l in line.split(":")]
                names.append(unit.split()[0])
        if line == "// Data":
            skiprows = k + 1
            break

    f.seek(0)
    df = pd.read_csv(
        f,
        header=None,
        index_col=None,
        names=names,
        skiprows=skiprows,
        delim_whitespace=True,
    )
    f.close()

    df.set_index("depth", drop=True, inplace=True)
    df.index.name = "Depth [m]"
    name = _basename(fname)[1]
    metadata = {
        "lon": lon,
        "lat": lat,
        "name": str(name),
        "header": "\n".join(header),
        "serial": serial,
    }
    setattr(df, "_metadata", metadata)
    return df
DataFrame constructor to open XBT EDF ASCII format.

Examples
--------
>>> from pathlib import Path
>>> import ctd
>>> data_path = Path(__file__).parents[1].joinpath("tests", "data")
>>> cast = ctd.from_edf(data_path.joinpath('XBT.EDF.gz'))
>>> ax = cast['temperature'].plot_cast()
https://github.com/pyoceans/python-ctd/blob/fa9a9d02da3dfed6d1d60db0e52bbab52adfe666/ctd/read.py#L259-L332
pyoceans/python-ctd
ctd/read.py
from_cnv
def from_cnv(fname):
    """
    DataFrame constructor to open Seabird CTD CNV-ASCII format.

    Examples
    --------
    >>> from pathlib import Path
    >>> import ctd
    >>> data_path = Path(__file__).parents[1].joinpath("tests", "data")
    >>> cast = ctd.from_cnv(data_path.joinpath('CTD_big.cnv.bz2'))
    >>> downcast, upcast = cast.split()
    >>> ax = downcast['t090C'].plot_cast()

    """
    f = _read_file(fname)
    metadata = _parse_seabird(f.readlines(), ftype="cnv")

    f.seek(0)
    df = pd.read_fwf(
        f,
        header=None,
        index_col=None,
        names=metadata["names"],
        skiprows=metadata["skiprows"],
        delim_whitespace=True,
        widths=[11] * len(metadata["names"]),
    )
    f.close()

    key_set = False
    prkeys = ["prDM", "prdM", "pr"]
    for prkey in prkeys:
        try:
            df.set_index(prkey, drop=True, inplace=True)
            key_set = True
        except KeyError:
            continue
    if not key_set:
        raise KeyError(
            f"Could not find pressure field (supported names are {prkeys})."
        )
    df.index.name = "Pressure [dbar]"

    name = _basename(fname)[1]

    dtypes = {"bpos": int, "pumps": bool, "flag": bool}
    for column in df.columns:
        if column in dtypes:
            df[column] = df[column].astype(dtypes[column])
        else:
            try:
                df[column] = df[column].astype(float)
            except ValueError:
                warnings.warn("Could not convert %s to float." % column)

    metadata["name"] = str(name)
    setattr(df, "_metadata", metadata)
    return df
python
def from_cnv(fname):
    """
    DataFrame constructor to open Seabird CTD CNV-ASCII format.

    Examples
    --------
    >>> from pathlib import Path
    >>> import ctd
    >>> data_path = Path(__file__).parents[1].joinpath("tests", "data")
    >>> cast = ctd.from_cnv(data_path.joinpath('CTD_big.cnv.bz2'))
    >>> downcast, upcast = cast.split()
    >>> ax = downcast['t090C'].plot_cast()

    """
    f = _read_file(fname)
    metadata = _parse_seabird(f.readlines(), ftype="cnv")

    f.seek(0)
    df = pd.read_fwf(
        f,
        header=None,
        index_col=None,
        names=metadata["names"],
        skiprows=metadata["skiprows"],
        delim_whitespace=True,
        widths=[11] * len(metadata["names"]),
    )
    f.close()

    key_set = False
    prkeys = ["prDM", "prdM", "pr"]
    for prkey in prkeys:
        try:
            df.set_index(prkey, drop=True, inplace=True)
            key_set = True
        except KeyError:
            continue
    if not key_set:
        raise KeyError(
            f"Could not find pressure field (supported names are {prkeys})."
        )
    df.index.name = "Pressure [dbar]"

    name = _basename(fname)[1]

    dtypes = {"bpos": int, "pumps": bool, "flag": bool}
    for column in df.columns:
        if column in dtypes:
            df[column] = df[column].astype(dtypes[column])
        else:
            try:
                df[column] = df[column].astype(float)
            except ValueError:
                warnings.warn("Could not convert %s to float." % column)

    metadata["name"] = str(name)
    setattr(df, "_metadata", metadata)
    return df
DataFrame constructor to open Seabird CTD CNV-ASCII format.

Examples
--------
>>> from pathlib import Path
>>> import ctd
>>> data_path = Path(__file__).parents[1].joinpath("tests", "data")
>>> cast = ctd.from_cnv(data_path.joinpath('CTD_big.cnv.bz2'))
>>> downcast, upcast = cast.split()
>>> ax = downcast['t090C'].plot_cast()
https://github.com/pyoceans/python-ctd/blob/fa9a9d02da3dfed6d1d60db0e52bbab52adfe666/ctd/read.py#L335-L392
pyoceans/python-ctd
ctd/read.py
from_fsi
def from_fsi(fname, skiprows=9):
    """
    DataFrame constructor to open Falmouth Scientific, Inc. (FSI) CTD
    ASCII format.

    Examples
    --------
    >>> from pathlib import Path
    >>> import ctd
    >>> data_path = Path(__file__).parents[1].joinpath("tests", "data")
    >>> cast = ctd.from_fsi(data_path.joinpath('FSI.txt.gz'))
    >>> downcast, upcast = cast.split()
    >>> ax = downcast['TEMP'].plot_cast()

    """
    f = _read_file(fname)
    df = pd.read_csv(
        f,
        header="infer",
        index_col=None,
        skiprows=skiprows,
        dtype=float,
        delim_whitespace=True,
    )
    f.close()

    df.set_index("PRES", drop=True, inplace=True)
    df.index.name = "Pressure [dbar]"
    metadata = {"name": str(fname)}
    setattr(df, "_metadata", metadata)
    return df
python
def from_fsi(fname, skiprows=9):
    """
    DataFrame constructor to open Falmouth Scientific, Inc. (FSI) CTD
    ASCII format.

    Examples
    --------
    >>> from pathlib import Path
    >>> import ctd
    >>> data_path = Path(__file__).parents[1].joinpath("tests", "data")
    >>> cast = ctd.from_fsi(data_path.joinpath('FSI.txt.gz'))
    >>> downcast, upcast = cast.split()
    >>> ax = downcast['TEMP'].plot_cast()

    """
    f = _read_file(fname)
    df = pd.read_csv(
        f,
        header="infer",
        index_col=None,
        skiprows=skiprows,
        dtype=float,
        delim_whitespace=True,
    )
    f.close()

    df.set_index("PRES", drop=True, inplace=True)
    df.index.name = "Pressure [dbar]"
    metadata = {"name": str(fname)}
    setattr(df, "_metadata", metadata)
    return df
DataFrame constructor to open Falmouth Scientific, Inc. (FSI) CTD
ASCII format.

Examples
--------
>>> from pathlib import Path
>>> import ctd
>>> data_path = Path(__file__).parents[1].joinpath("tests", "data")
>>> cast = ctd.from_fsi(data_path.joinpath('FSI.txt.gz'))
>>> downcast, upcast = cast.split()
>>> ax = downcast['TEMP'].plot_cast()
https://github.com/pyoceans/python-ctd/blob/fa9a9d02da3dfed6d1d60db0e52bbab52adfe666/ctd/read.py#L395-L425
pyoceans/python-ctd
ctd/read.py
rosette_summary
def rosette_summary(fname):
    """
    Make a BTL (bottle) file from a ROS (bottle log) file.

    This gives more control over the averaging process, and over the step
    at which the averaging is performed, eliminating the need to read the
    data into the SBE Software again after pre-processing.

    NOTE: Do not run LoopEdit on the upcast!

    Examples
    --------
    >>> from pathlib import Path
    >>> import ctd
    >>> data_path = Path(__file__).parents[1].joinpath("tests", "data")
    >>> fname = data_path.joinpath('CTD/g01l01s01.ros')
    >>> ros = ctd.rosette_summary(fname)
    >>> ros = ros.groupby(ros.index).mean()
    >>> ros.pressure.values.astype(int)
    array([835, 806, 705, 604, 503, 404, 303, 201, 151, 100, 51, 1])

    """
    ros = from_cnv(fname)
    ros["pressure"] = ros.index.values.astype(float)
    ros["nbf"] = ros["nbf"].astype(int)
    ros.set_index("nbf", drop=True, inplace=True, verify_integrity=False)
    return ros
python
def rosette_summary(fname):
    """
    Make a BTL (bottle) file from a ROS (bottle log) file.

    This gives more control over the averaging process, and over the step
    at which the averaging is performed, eliminating the need to read the
    data into the SBE Software again after pre-processing.

    NOTE: Do not run LoopEdit on the upcast!

    Examples
    --------
    >>> from pathlib import Path
    >>> import ctd
    >>> data_path = Path(__file__).parents[1].joinpath("tests", "data")
    >>> fname = data_path.joinpath('CTD/g01l01s01.ros')
    >>> ros = ctd.rosette_summary(fname)
    >>> ros = ros.groupby(ros.index).mean()
    >>> ros.pressure.values.astype(int)
    array([835, 806, 705, 604, 503, 404, 303, 201, 151, 100, 51, 1])

    """
    ros = from_cnv(fname)
    ros["pressure"] = ros.index.values.astype(float)
    ros["nbf"] = ros["nbf"].astype(int)
    ros.set_index("nbf", drop=True, inplace=True, verify_integrity=False)
    return ros
Make a BTL (bottle) file from a ROS (bottle log) file.

This gives more control over the averaging process, and over the step at
which the averaging is performed, eliminating the need to read the data
into the SBE Software again after pre-processing.

NOTE: Do not run LoopEdit on the upcast!

Examples
--------
>>> from pathlib import Path
>>> import ctd
>>> data_path = Path(__file__).parents[1].joinpath("tests", "data")
>>> fname = data_path.joinpath('CTD/g01l01s01.ros')
>>> ros = ctd.rosette_summary(fname)
>>> ros = ros.groupby(ros.index).mean()
>>> ros.pressure.values.astype(int)
array([835, 806, 705, 604, 503, 404, 303, 201, 151, 100, 51, 1])
https://github.com/pyoceans/python-ctd/blob/fa9a9d02da3dfed6d1d60db0e52bbab52adfe666/ctd/read.py#L428-L453
pyoceans/python-ctd
ctd/extras.py
_extrap1d
def _extrap1d(interpolator):
    """
    http://stackoverflow.com/questions/2745329/
    How to make scipy.interpolate return an extrapolated result beyond the
    input range.

    """
    xs, ys = interpolator.x, interpolator.y

    def pointwise(x):
        if x < xs[0]:
            return ys[0] + (x - xs[0]) * (ys[1] - ys[0]) / (xs[1] - xs[0])
        elif x > xs[-1]:
            return ys[-1] + (x - xs[-1]) * (ys[-1] - ys[-2]) / (
                xs[-1] - xs[-2]
            )
        else:
            return interpolator(x)

    def ufunclike(xs):
        return np.array(list(map(pointwise, np.array(xs))))

    return ufunclike
python
def _extrap1d(interpolator):
    """
    http://stackoverflow.com/questions/2745329/
    How to make scipy.interpolate return an extrapolated result beyond the
    input range.

    """
    xs, ys = interpolator.x, interpolator.y

    def pointwise(x):
        if x < xs[0]:
            return ys[0] + (x - xs[0]) * (ys[1] - ys[0]) / (xs[1] - xs[0])
        elif x > xs[-1]:
            return ys[-1] + (x - xs[-1]) * (ys[-1] - ys[-2]) / (
                xs[-1] - xs[-2]
            )
        else:
            return interpolator(x)

    def ufunclike(xs):
        return np.array(list(map(pointwise, np.array(xs))))

    return ufunclike
http://stackoverflow.com/questions/2745329/ How to make scipy.interpolate return an extrapolated result beyond the input range.
https://github.com/pyoceans/python-ctd/blob/fa9a9d02da3dfed6d1d60db0e52bbab52adfe666/ctd/extras.py#L7-L29
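A small self-contained check of _extrap1d: inside the knots it defers to the wrapped interpolator, outside it extends the edge slope linearly.

import numpy as np
from scipy.interpolate import interp1d

xs = np.array([0.0, 1.0, 2.0])
ys = xs ** 2  # knots at (0, 0), (1, 1), (2, 4)
f = _extrap1d(interp1d(xs, ys))
print(f([-1.0, 0.5, 3.0]))  # [-1.0, 0.5, 7.0] -- edges extended linearly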
pyoceans/python-ctd
ctd/extras.py
extrap_sec
def extrap_sec(data, dist, depth, w1=1.0, w2=0):
    """
    Extrapolates `data` to zones where the shallow stations are shadowed by
    the deep stations. The shadow region usually cannot be extrapolated via
    linear interpolation.

    The extrapolation is applied using the gradients of the `data` at a
    certain level.

    Parameters
    ----------
    data : array_like
           Data to be extrapolated
    dist : array_like
           Stations distance
    depth : array_like
            Stations depth
    w1 : float
         Weight of the row-wise (along-distance) extrapolation
    w2 : float
         Weight of the column-wise (along-depth) extrapolation

    Returns
    -------
    Sec_extrap : array_like
                 Extrapolated variable

    """
    from scipy.interpolate import interp1d

    new_data1 = []
    for row in data:
        mask = ~np.isnan(row)
        if mask.any():
            y = row[mask]
            if y.size == 1:
                row = np.repeat(y, len(mask))
            else:
                x = dist[mask]
                f_i = interp1d(x, y)
                f_x = _extrap1d(f_i)
                row = f_x(dist)
        new_data1.append(row)

    new_data2 = []
    for col in data.T:
        mask = ~np.isnan(col)
        if mask.any():
            y = col[mask]
            if y.size == 1:
                col = np.repeat(y, len(mask))
            else:
                z = depth[mask]
                f_i = interp1d(z, y)
                f_z = _extrap1d(f_i)
                col = f_z(depth)
        new_data2.append(col)

    new_data = np.array(new_data1) * w1 + np.array(new_data2).T * w2

    return new_data
python
def extrap_sec(data, dist, depth, w1=1.0, w2=0):
    """
    Extrapolates `data` to zones where the shallow stations are shadowed by
    the deep stations. The shadow region usually cannot be extrapolated via
    linear interpolation.

    The extrapolation is applied using the gradients of the `data` at a
    certain level.

    Parameters
    ----------
    data : array_like
           Data to be extrapolated
    dist : array_like
           Stations distance
    depth : array_like
            Stations depth
    w1 : float
         Weight of the row-wise (along-distance) extrapolation
    w2 : float
         Weight of the column-wise (along-depth) extrapolation

    Returns
    -------
    Sec_extrap : array_like
                 Extrapolated variable

    """
    from scipy.interpolate import interp1d

    new_data1 = []
    for row in data:
        mask = ~np.isnan(row)
        if mask.any():
            y = row[mask]
            if y.size == 1:
                row = np.repeat(y, len(mask))
            else:
                x = dist[mask]
                f_i = interp1d(x, y)
                f_x = _extrap1d(f_i)
                row = f_x(dist)
        new_data1.append(row)

    new_data2 = []
    for col in data.T:
        mask = ~np.isnan(col)
        if mask.any():
            y = col[mask]
            if y.size == 1:
                col = np.repeat(y, len(mask))
            else:
                z = depth[mask]
                f_i = interp1d(z, y)
                f_z = _extrap1d(f_i)
                col = f_z(depth)
        new_data2.append(col)

    new_data = np.array(new_data1) * w1 + np.array(new_data2).T * w2

    return new_data
Extrapolates `data` to zones where the shallow stations are shadowed by
the deep stations. The shadow region usually cannot be extrapolated via
linear interpolation.

The extrapolation is applied using the gradients of the `data` at a
certain level.

Parameters
----------
data : array_like
       Data to be extrapolated
dist : array_like
       Stations distance
depth : array_like
        Stations depth
w1 : float
     Weight of the row-wise (along-distance) extrapolation
w2 : float
     Weight of the column-wise (along-depth) extrapolation

Returns
-------
Sec_extrap : array_like
             Extrapolated variable
https://github.com/pyoceans/python-ctd/blob/fa9a9d02da3dfed6d1d60db0e52bbab52adfe666/ctd/extras.py#L37-L93
pyoceans/python-ctd
ctd/extras.py
gen_topomask
def gen_topomask(h, lon, lat, dx=1.0, kind="linear", plot=False):
    """
    Generates a topography mask from an oceanographic transect taking the
    deepest CTD scan as the depth of each station.

    Inputs
    ------
    h : array
        Pressure of the deepest CTD scan for each station [dbar].
    lon : array
          Longitude of each station [decimal degrees east].
    lat : array
          Latitude of each station [decimal degrees north].
    dx : float
         Horizontal resolution of the output arrays [km].
    kind : string, optional
           Type of the interpolation to be performed.
           See scipy.interpolate.interp1d documentation for details.
    plot : bool
           Whether to plot mask for visualization.

    Outputs
    -------
    xm : array
         Horizontal distances [km].
    hm : array
         Local depth [m].

    Author
    ------
    André Palóczy Filho ([email protected]) -- October/2012

    """
    import gsw
    from scipy.interpolate import interp1d

    h, lon, lat = list(map(np.asanyarray, (h, lon, lat)))
    # Distance in km.
    x = np.append(0, np.cumsum(gsw.distance(lon, lat)[0] / 1e3))
    h = -gsw.z_from_p(h, lat.mean())
    Ih = interp1d(x, h, kind=kind, bounds_error=False, fill_value=h[-1])
    xm = np.arange(0, x.max() + dx, dx)
    hm = Ih(xm)

    return xm, hm
python
def gen_topomask(h, lon, lat, dx=1.0, kind="linear", plot=False):
    """
    Generates a topography mask from an oceanographic transect taking the
    deepest CTD scan as the depth of each station.

    Inputs
    ------
    h : array
        Pressure of the deepest CTD scan for each station [dbar].
    lon : array
          Longitude of each station [decimal degrees east].
    lat : array
          Latitude of each station [decimal degrees north].
    dx : float
         Horizontal resolution of the output arrays [km].
    kind : string, optional
           Type of the interpolation to be performed.
           See scipy.interpolate.interp1d documentation for details.
    plot : bool
           Whether to plot mask for visualization.

    Outputs
    -------
    xm : array
         Horizontal distances [km].
    hm : array
         Local depth [m].

    Author
    ------
    André Palóczy Filho ([email protected]) -- October/2012

    """
    import gsw
    from scipy.interpolate import interp1d

    h, lon, lat = list(map(np.asanyarray, (h, lon, lat)))
    # Distance in km.
    x = np.append(0, np.cumsum(gsw.distance(lon, lat)[0] / 1e3))
    h = -gsw.z_from_p(h, lat.mean())
    Ih = interp1d(x, h, kind=kind, bounds_error=False, fill_value=h[-1])
    xm = np.arange(0, x.max() + dx, dx)
    hm = Ih(xm)

    return xm, hm
Generates a topography mask from an oceanographic transect taking the
deepest CTD scan as the depth of each station.

Inputs
------
h : array
    Pressure of the deepest CTD scan for each station [dbar].
lon : array
      Longitude of each station [decimal degrees east].
lat : array
      Latitude of each station [decimal degrees north].
dx : float
     Horizontal resolution of the output arrays [km].
kind : string, optional
       Type of the interpolation to be performed.
       See scipy.interpolate.interp1d documentation for details.
plot : bool
       Whether to plot mask for visualization.

Outputs
-------
xm : array
     Horizontal distances [km].
hm : array
     Local depth [m].

Author
------
André Palóczy Filho ([email protected]) -- October/2012
https://github.com/pyoceans/python-ctd/blob/fa9a9d02da3dfed6d1d60db0e52bbab52adfe666/ctd/extras.py#L96-L140
pyoceans/python-ctd
ctd/extras.py
cell_thermal_mass
def cell_thermal_mass(temperature, conductivity):
    """
    Sample interval is measured in seconds.
    Temperature in degrees.
    CTM is calculated in S/m.

    """
    alpha = 0.03  # Thermal anomaly amplitude.
    beta = 1.0 / 7  # Thermal anomaly time constant (1/beta).
    sample_interval = 1 / 15.0
    a = 2 * alpha / (sample_interval * beta + 2)
    b = 1 - (2 * a / alpha)
    # Bug fix: the original used a list literal, ``[temperature - 20]``,
    # which raises a TypeError; parentheses keep this a numeric array.
    dCodT = 0.1 * (1 + 0.006 * (temperature - 20))
    dT = np.diff(temperature)  # NB: one element shorter than the input.
    ctm = -1.0 * b * conductivity + a * (dCodT) * dT  # [S/m]
    return ctm
python
def cell_thermal_mass(temperature, conductivity):
    """
    Sample interval is measured in seconds.
    Temperature in degrees.
    CTM is calculated in S/m.

    """
    alpha = 0.03  # Thermal anomaly amplitude.
    beta = 1.0 / 7  # Thermal anomaly time constant (1/beta).
    sample_interval = 1 / 15.0
    a = 2 * alpha / (sample_interval * beta + 2)
    b = 1 - (2 * a / alpha)
    # Bug fix: the original used a list literal, ``[temperature - 20]``,
    # which raises a TypeError; parentheses keep this a numeric array.
    dCodT = 0.1 * (1 + 0.006 * (temperature - 20))
    dT = np.diff(temperature)  # NB: one element shorter than the input.
    ctm = -1.0 * b * conductivity + a * (dCodT) * dT  # [S/m]
    return ctm
Sample interval is measured in seconds. Temperature in degrees. CTM is calculated in S/m.
https://github.com/pyoceans/python-ctd/blob/fa9a9d02da3dfed6d1d60db0e52bbab52adfe666/ctd/extras.py#L243-L260
pyoceans/python-ctd
ctd/extras.py
barrier_layer_thickness
def barrier_layer_thickness(SA, CT):
    """
    Compute the thickness of water separating the mixed surface layer from
    the thermocline.

    A more precise definition would be the difference between mixed layer
    depth (MLD) calculated from temperature minus the mixed layer depth
    calculated using density.

    """
    import gsw

    sigma_theta = gsw.sigma0(SA, CT)
    mask = mixed_layer_depth(CT)
    mld = np.where(mask)[0][-1]
    sig_surface = sigma_theta[0]
    sig_bottom_mld = gsw.sigma0(SA[0], CT[mld])
    d_sig_t = sig_surface - sig_bottom_mld
    d_sig = sigma_theta - sig_bottom_mld
    mask = d_sig < d_sig_t  # Barrier layer.
    return Series(mask, index=SA.index, name="BLT")
python
def barrier_layer_thickness(SA, CT):
    """
    Compute the thickness of water separating the mixed surface layer from
    the thermocline.

    A more precise definition would be the difference between mixed layer
    depth (MLD) calculated from temperature minus the mixed layer depth
    calculated using density.

    """
    import gsw

    sigma_theta = gsw.sigma0(SA, CT)
    mask = mixed_layer_depth(CT)
    mld = np.where(mask)[0][-1]
    sig_surface = sigma_theta[0]
    sig_bottom_mld = gsw.sigma0(SA[0], CT[mld])
    d_sig_t = sig_surface - sig_bottom_mld
    d_sig = sigma_theta - sig_bottom_mld
    mask = d_sig < d_sig_t  # Barrier layer.
    return Series(mask, index=SA.index, name="BLT")
Compute the thickness of water separating the mixed surface layer from the thermocline. A more precise definition would be the difference between mixed layer depth (MLD) calculated from temperature minus the mixed layer depth calculated using density.
https://github.com/pyoceans/python-ctd/blob/fa9a9d02da3dfed6d1d60db0e52bbab52adfe666/ctd/extras.py#L271-L289
pyoceans/python-ctd
ctd/plotting.py
plot_cast
def plot_cast(df, secondary_y=False, label=None, *args, **kwargs):
    """
    Plot a CTD variable with the index in the y-axis instead of x-axis.

    """
    ax = kwargs.pop("ax", None)
    fignums = plt.get_fignums()
    if ax is None and not fignums:
        ax = plt.axes()
        fig = ax.get_figure()
        fig.set_size_inches((5.25, 6.75))
    else:
        ax = plt.gca()
        fig = plt.gcf()

    figsize = kwargs.pop("figsize", fig.get_size_inches())
    fig.set_size_inches(figsize)

    y_inverted = False
    if not getattr(ax, "y_inverted", False):
        setattr(ax, "y_inverted", True)
        y_inverted = True

    if secondary_y:
        ax = ax.twiny()

    xlabel = getattr(df, "name", None)
    ylabel = getattr(df.index, "name", None)

    if isinstance(df, pd.DataFrame):
        labels = label if label else df.columns
        for k, (col, series) in enumerate(df.iteritems()):
            ax.plot(series, series.index, label=labels[k])
    elif isinstance(df, pd.Series):
        label = label if label else str(df.name)
        ax.plot(df.values, df.index, label=label, *args, **kwargs)

    ax.set_ylabel(ylabel)
    ax.set_xlabel(xlabel)

    if y_inverted and not secondary_y:
        ax.invert_yaxis()

    return ax
python
def plot_cast(df, secondary_y=False, label=None, *args, **kwargs):
    """
    Plot a CTD variable with the index in the y-axis instead of x-axis.

    """
    ax = kwargs.pop("ax", None)
    fignums = plt.get_fignums()
    if ax is None and not fignums:
        ax = plt.axes()
        fig = ax.get_figure()
        fig.set_size_inches((5.25, 6.75))
    else:
        ax = plt.gca()
        fig = plt.gcf()

    figsize = kwargs.pop("figsize", fig.get_size_inches())
    fig.set_size_inches(figsize)

    y_inverted = False
    if not getattr(ax, "y_inverted", False):
        setattr(ax, "y_inverted", True)
        y_inverted = True

    if secondary_y:
        ax = ax.twiny()

    xlabel = getattr(df, "name", None)
    ylabel = getattr(df.index, "name", None)

    if isinstance(df, pd.DataFrame):
        labels = label if label else df.columns
        for k, (col, series) in enumerate(df.iteritems()):
            ax.plot(series, series.index, label=labels[k])
    elif isinstance(df, pd.Series):
        label = label if label else str(df.name)
        ax.plot(df.values, df.index, label=label, *args, **kwargs)

    ax.set_ylabel(ylabel)
    ax.set_xlabel(xlabel)

    if y_inverted and not secondary_y:
        ax.invert_yaxis()

    return ax
Plot a CTD variable with the index in the y-axis instead of x-axis.
https://github.com/pyoceans/python-ctd/blob/fa9a9d02da3dfed6d1d60db0e52bbab52adfe666/ctd/plotting.py#L8-L50
pyoceans/python-ctd
ctd/processing.py
_rolling_window
def _rolling_window(data, block):
    """
    http://stackoverflow.com/questions/4936620/
    Using strides for an efficient moving average filter.

    """
    shape = data.shape[:-1] + (data.shape[-1] - block + 1, block)
    strides = data.strides + (data.strides[-1],)
    return np.lib.stride_tricks.as_strided(data, shape=shape, strides=strides)
python
def _rolling_window(data, block):
    """
    http://stackoverflow.com/questions/4936620/
    Using strides for an efficient moving average filter.

    """
    shape = data.shape[:-1] + (data.shape[-1] - block + 1, block)
    strides = data.strides + (data.strides[-1],)
    return np.lib.stride_tricks.as_strided(data, shape=shape, strides=strides)
http://stackoverflow.com/questions/4936620/ Using strides for an efficient moving average filter.
https://github.com/pyoceans/python-ctd/blob/fa9a9d02da3dfed6d1d60db0e52bbab52adfe666/ctd/processing.py#L7-L15
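_rolling_window produces an (n - block + 1, block) strided view with no copying, so each row is one window:

import numpy as np

x = np.arange(6, dtype=float)
print(_rolling_window(x, 3))
# [[0. 1. 2.]
#  [1. 2. 3.]
#  [2. 3. 4.]
#  [3. 4. 5.]]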
pyoceans/python-ctd
ctd/processing.py
split
def split(df):
    """Returns a tuple with down/up-cast."""
    idx = df.index.argmax() + 1
    down = df.iloc[:idx]
    # Reverse index to orient it as a CTD cast.
    up = df.iloc[idx:][::-1]
    return down, up
python
def split(df):
    """Returns a tuple with down/up-cast."""
    idx = df.index.argmax() + 1
    down = df.iloc[:idx]
    # Reverse index to orient it as a CTD cast.
    up = df.iloc[idx:][::-1]
    return down, up
Returns a tuple with down/up-cast.
https://github.com/pyoceans/python-ctd/blob/fa9a9d02da3dfed6d1d60db0e52bbab52adfe666/ctd/processing.py#L27-L33
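The split point is one past the deepest sample (the index maximum), and the upcast is reversed so both halves read surface-to-bottom. A toy check:

import pandas as pd

cast = pd.DataFrame({"t": [20.0, 18.0, 15.0, 16.0, 19.0]},
                    index=[0.0, 5.0, 10.0, 5.0, 0.0])
down, up = split(cast)
print(down.index.tolist())  # [0.0, 5.0, 10.0]
print(up.index.tolist())    # [0.0, 5.0]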
pyoceans/python-ctd
ctd/processing.py
lp_filter
def lp_filter(df, sample_rate=24.0, time_constant=0.15):
    """
    Filter a series with `time_constant` (use 0.15 s for pressure), and for
    a signal of `sample_rate` in Hertz (24 Hz for 911+).
    NOTE: 911+ systems do not require a filter for temperature or salinity.

    Examples
    --------
    >>> from pathlib import Path
    >>> import matplotlib.pyplot as plt
    >>> import ctd
    >>> data_path = Path(__file__).parents[1].joinpath("tests", "data")
    >>> raw = ctd.from_cnv(data_path.joinpath('CTD-spiked-unfiltered.cnv.bz2'))
    >>> prc = ctd.from_cnv(data_path.joinpath('CTD-spiked-filtered.cnv.bz2'))
    >>> kw = {"sample_rate": 24.0, "time_constant": 0.15}
    >>> original = prc.index.values
    >>> unfiltered = raw.index.values
    >>> filtered = raw.lp_filter(**kw).index.values
    >>> fig, ax = plt.subplots()
    >>> l1, = ax.plot(original, 'k', label='original')
    >>> l2, = ax.plot(unfiltered, 'r', label='unfiltered')
    >>> l3, = ax.plot(filtered, 'g', label='filtered')
    >>> leg = ax.legend()

    Notes
    -----
    https://scipy-cookbook.readthedocs.io/items/FIRFilter.html

    """
    from scipy import signal

    # Butter is closer to what SBE is doing with their cosine filter.
    Wn = (1.0 / time_constant) / (sample_rate * 2.0)
    b, a = signal.butter(2, Wn, "low")
    new_df = df.copy()
    new_df.index = signal.filtfilt(b, a, df.index.values)
    return new_df
python
def lp_filter(df, sample_rate=24.0, time_constant=0.15):
    """
    Filter a series with `time_constant` (use 0.15 s for pressure), and for
    a signal of `sample_rate` in Hertz (24 Hz for 911+).
    NOTE: 911+ systems do not require a filter for temperature or salinity.

    Examples
    --------
    >>> from pathlib import Path
    >>> import matplotlib.pyplot as plt
    >>> import ctd
    >>> data_path = Path(__file__).parents[1].joinpath("tests", "data")
    >>> raw = ctd.from_cnv(data_path.joinpath('CTD-spiked-unfiltered.cnv.bz2'))
    >>> prc = ctd.from_cnv(data_path.joinpath('CTD-spiked-filtered.cnv.bz2'))
    >>> kw = {"sample_rate": 24.0, "time_constant": 0.15}
    >>> original = prc.index.values
    >>> unfiltered = raw.index.values
    >>> filtered = raw.lp_filter(**kw).index.values
    >>> fig, ax = plt.subplots()
    >>> l1, = ax.plot(original, 'k', label='original')
    >>> l2, = ax.plot(unfiltered, 'r', label='unfiltered')
    >>> l3, = ax.plot(filtered, 'g', label='filtered')
    >>> leg = ax.legend()

    Notes
    -----
    https://scipy-cookbook.readthedocs.io/items/FIRFilter.html

    """
    from scipy import signal

    # Butter is closer to what SBE is doing with their cosine filter.
    Wn = (1.0 / time_constant) / (sample_rate * 2.0)
    b, a = signal.butter(2, Wn, "low")
    new_df = df.copy()
    new_df.index = signal.filtfilt(b, a, df.index.values)
    return new_df
Filter a series with `time_constant` (use 0.15 s for pressure), and for
a signal of `sample_rate` in Hertz (24 Hz for 911+).
NOTE: 911+ systems do not require a filter for temperature or salinity.

Examples
--------
>>> from pathlib import Path
>>> import matplotlib.pyplot as plt
>>> import ctd
>>> data_path = Path(__file__).parents[1].joinpath("tests", "data")
>>> raw = ctd.from_cnv(data_path.joinpath('CTD-spiked-unfiltered.cnv.bz2'))
>>> prc = ctd.from_cnv(data_path.joinpath('CTD-spiked-filtered.cnv.bz2'))
>>> kw = {"sample_rate": 24.0, "time_constant": 0.15}
>>> original = prc.index.values
>>> unfiltered = raw.index.values
>>> filtered = raw.lp_filter(**kw).index.values
>>> fig, ax = plt.subplots()
>>> l1, = ax.plot(original, 'k', label='original')
>>> l2, = ax.plot(unfiltered, 'r', label='unfiltered')
>>> l3, = ax.plot(filtered, 'g', label='filtered')
>>> leg = ax.legend()

Notes
-----
https://scipy-cookbook.readthedocs.io/items/FIRFilter.html
https://github.com/pyoceans/python-ctd/blob/fa9a9d02da3dfed6d1d60db0e52bbab52adfe666/ctd/processing.py#L38-L75
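The cutoff passed to signal.butter is the time constant converted to a fraction of the Nyquist frequency: Wn = (1 / 0.15) / (24 * 2) ≈ 0.139. A standalone sketch of the same zero-phase filtering step on synthetic data:

import numpy as np
from scipy import signal

sample_rate, time_constant = 24.0, 0.15
Wn = (1.0 / time_constant) / (sample_rate * 2.0)  # ~0.139
b, a = signal.butter(2, Wn, "low")
noisy = np.linspace(0, 100, 240) + np.random.randn(240)
smoothed = signal.filtfilt(b, a, noisy)  # filtfilt: forward-backward, no phase shift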
pyoceans/python-ctd
ctd/processing.py
press_check
def press_check(df):
    """
    Remove pressure reversals from the index.

    """
    new_df = df.copy()
    press = new_df.copy().index.values

    ref = press[0]
    inversions = np.diff(np.r_[press, press[-1]]) < 0
    mask = np.zeros_like(inversions)
    for k, p in enumerate(inversions):
        if p:
            ref = press[k]
            cut = press[k + 1 :] < ref
            mask[k + 1 :][cut] = True
    new_df[mask] = np.NaN
    return new_df
python
def press_check(df):
    """
    Remove pressure reversals from the index.

    """
    new_df = df.copy()
    press = new_df.copy().index.values

    ref = press[0]
    inversions = np.diff(np.r_[press, press[-1]]) < 0
    mask = np.zeros_like(inversions)
    for k, p in enumerate(inversions):
        if p:
            ref = press[k]
            cut = press[k + 1 :] < ref
            mask[k + 1 :][cut] = True
    new_df[mask] = np.NaN
    return new_df
Remove pressure reversals from the index.
https://github.com/pyoceans/python-ctd/blob/fa9a9d02da3dfed6d1d60db0e52bbab52adfe666/ctd/processing.py#L80-L97
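press_check flags any sample whose pressure falls back below the last reversal point, replacing it with NaN. A toy check where one sample reverses:

import pandas as pd

s = pd.Series([10.0, 11.0, 12.0, 11.5, 13.0],
              index=[0.0, 1.0, 2.0, 1.5, 3.0])
print(press_check(s))  # the sample at the 1.5 dbar reversal becomes NaN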
pyoceans/python-ctd
ctd/processing.py
bindata
def bindata(df, delta=1.0, method="average"):
    """
    Bin average the index (usually pressure) to a given interval (default
    delta = 1).

    """
    start = np.floor(df.index[0])
    stop = np.ceil(df.index[-1])
    new_index = np.arange(start, stop, delta)
    binned = pd.cut(df.index, bins=new_index)
    if method == "average":
        new_df = df.groupby(binned).mean()
        new_df.index = new_index[:-1]
    elif method == "interpolate":
        raise NotImplementedError(
            "Bin-average via interpolation method is not Implemented yet."
        )
    else:
        raise ValueError(
            f"Expected method `average` or `interpolate`, but got {method}."
        )
    return new_df
python
def bindata(df, delta=1.0, method="average"):
    """
    Bin average the index (usually pressure) to a given interval (default
    delta = 1).

    """
    start = np.floor(df.index[0])
    stop = np.ceil(df.index[-1])
    new_index = np.arange(start, stop, delta)
    binned = pd.cut(df.index, bins=new_index)
    if method == "average":
        new_df = df.groupby(binned).mean()
        new_df.index = new_index[:-1]
    elif method == "interpolate":
        raise NotImplementedError(
            "Bin-average via interpolation method is not Implemented yet."
        )
    else:
        raise ValueError(
            f"Expected method `average` or `interpolate`, but got {method}."
        )
    return new_df
Bin average the index (usually pressure) to a given interval (default delta = 1).
https://github.com/pyoceans/python-ctd/blob/fa9a9d02da3dfed6d1d60db0e52bbab52adfe666/ctd/processing.py#L102-L123
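bindata groups samples into half-open pressure intervals via pd.cut and averages each bin; note that samples past the last full interval are dropped, a consequence of np.arange excluding the stop value. A toy check (exact behavior may vary slightly across pandas versions):

import pandas as pd

s = pd.Series([10.0, 11.0, 12.0, 13.0, 14.0],
              index=[0.25, 0.75, 1.25, 1.75, 2.25])
print(bindata(s, delta=1.0))
# 0.0    10.5
# 1.0    12.5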
pyoceans/python-ctd
ctd/processing.py
_despike
def _despike(series, n1, n2, block, keep):
    """
    Wild Edit Seabird-like function.  Passes with Standard deviation
    `n1` and `n2` with window size `block`.

    """
    data = series.values.astype(float).copy()
    roll = _rolling_window(data, block)
    roll = ma.masked_invalid(roll)
    std = n1 * roll.std(axis=1)
    mean = roll.mean(axis=1)
    # Use the last value to fill-up.
    std = np.r_[std, np.tile(std[-1], block - 1)]
    mean = np.r_[mean, np.tile(mean[-1], block - 1)]
    mask = np.abs(data - mean.filled(fill_value=np.NaN)) > std.filled(
        fill_value=np.NaN
    )
    data[mask] = np.NaN

    # Pass two recompute the mean and std without the flagged values from
    # pass one and removed the flagged data.
    roll = _rolling_window(data, block)
    roll = ma.masked_invalid(roll)
    std = n2 * roll.std(axis=1)
    mean = roll.mean(axis=1)
    # Use the last value to fill-up.
    std = np.r_[std, np.tile(std[-1], block - 1)]
    mean = np.r_[mean, np.tile(mean[-1], block - 1)]
    values = series.values.astype(float)
    mask = np.abs(values - mean.filled(fill_value=np.NaN)) > std.filled(
        fill_value=np.NaN
    )

    clean = series.astype(float).copy()
    clean[mask] = np.NaN
    return clean
python
def _despike(series, n1, n2, block, keep):
    """
    Wild Edit Seabird-like function.  Passes with Standard deviation
    `n1` and `n2` with window size `block`.

    """
    data = series.values.astype(float).copy()
    roll = _rolling_window(data, block)
    roll = ma.masked_invalid(roll)
    std = n1 * roll.std(axis=1)
    mean = roll.mean(axis=1)
    # Use the last value to fill-up.
    std = np.r_[std, np.tile(std[-1], block - 1)]
    mean = np.r_[mean, np.tile(mean[-1], block - 1)]
    mask = np.abs(data - mean.filled(fill_value=np.NaN)) > std.filled(
        fill_value=np.NaN
    )
    data[mask] = np.NaN

    # Pass two recompute the mean and std without the flagged values from
    # pass one and removed the flagged data.
    roll = _rolling_window(data, block)
    roll = ma.masked_invalid(roll)
    std = n2 * roll.std(axis=1)
    mean = roll.mean(axis=1)
    # Use the last value to fill-up.
    std = np.r_[std, np.tile(std[-1], block - 1)]
    mean = np.r_[mean, np.tile(mean[-1], block - 1)]
    values = series.values.astype(float)
    mask = np.abs(values - mean.filled(fill_value=np.NaN)) > std.filled(
        fill_value=np.NaN
    )

    clean = series.astype(float).copy()
    clean[mask] = np.NaN
    return clean
Wild Edit Seabird-like function. Passes with Standard deviation `n1` and `n2` with window size `block`.
https://github.com/pyoceans/python-ctd/blob/fa9a9d02da3dfed6d1d60db0e52bbab52adfe666/ctd/processing.py#L126-L162
pyoceans/python-ctd
ctd/processing.py
despike
def despike(df, n1=2, n2=20, block=100, keep=0):
    """
    Wild Edit Seabird-like function.  Passes with Standard deviation
    `n1` and `n2` with window size `block`.

    """
    if isinstance(df, pd.Series):
        new_df = _despike(df, n1=n1, n2=n2, block=block, keep=keep)
    else:
        new_df = df.apply(_despike, n1=n1, n2=n2, block=block, keep=keep)
    return new_df
python
def despike(df, n1=2, n2=20, block=100, keep=0):
    """
    Wild Edit Seabird-like function.  Passes with Standard deviation
    `n1` and `n2` with window size `block`.

    """
    if isinstance(df, pd.Series):
        new_df = _despike(df, n1=n1, n2=n2, block=block, keep=keep)
    else:
        new_df = df.apply(_despike, n1=n1, n2=n2, block=block, keep=keep)
    return new_df
Wild Edit Seabird-like function. Passes with Standard deviation `n1` and `n2` with window size `block`.
https://github.com/pyoceans/python-ctd/blob/fa9a9d02da3dfed6d1d60db0e52bbab52adfe666/ctd/processing.py#L167-L177
pyoceans/python-ctd
ctd/processing.py
_smooth
def _smooth(series, window_len, window):
    """Smooth the data using a window with requested size."""
    windows = {
        "flat": np.ones,
        "hanning": np.hanning,
        "hamming": np.hamming,
        "bartlett": np.bartlett,
        "blackman": np.blackman,
    }
    data = series.values.copy()

    if window_len < 3:
        return pd.Series(data, index=series.index, name=series.name)

    if window not in list(windows.keys()):
        raise ValueError(
            """window must be one of 'flat', 'hanning',
            'hamming', 'bartlett', 'blackman'"""
        )

    s = np.r_[
        2 * data[0] - data[window_len:1:-1],
        data,
        2 * data[-1] - data[-1:-window_len:-1],
    ]

    w = windows[window](window_len)

    data = np.convolve(w / w.sum(), s, mode="same")
    data = data[window_len - 1 : -window_len + 1]
    return pd.Series(data, index=series.index, name=series.name)
python
def _smooth(series, window_len, window):
    """Smooth the data using a window with requested size."""
    windows = {
        "flat": np.ones,
        "hanning": np.hanning,
        "hamming": np.hamming,
        "bartlett": np.bartlett,
        "blackman": np.blackman,
    }
    data = series.values.copy()

    if window_len < 3:
        return pd.Series(data, index=series.index, name=series.name)

    if window not in list(windows.keys()):
        raise ValueError(
            """window must be one of 'flat', 'hanning',
            'hamming', 'bartlett', 'blackman'"""
        )

    s = np.r_[
        2 * data[0] - data[window_len:1:-1],
        data,
        2 * data[-1] - data[-1:-window_len:-1],
    ]

    w = windows[window](window_len)

    data = np.convolve(w / w.sum(), s, mode="same")
    data = data[window_len - 1 : -window_len + 1]
    return pd.Series(data, index=series.index, name=series.name)
Smooth the data using a window with requested size.
https://github.com/pyoceans/python-ctd/blob/fa9a9d02da3dfed6d1d60db0e52bbab52adfe666/ctd/processing.py#L180-L211
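_smooth reflects the series at both ends before convolving, so the output keeps the input length. A quick shape check on noisy data:

import numpy as np
import pandas as pd

t = np.linspace(0, 2 * np.pi, 100)
noisy = pd.Series(np.sin(t) + 0.1 * np.random.randn(100))
clean = _smooth(noisy, window_len=11, window="hanning")
print(clean.shape == noisy.shape)  # True -- reflection padding keeps length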
pyoceans/python-ctd
ctd/processing.py
smooth
def smooth(df, window_len=11, window="hanning"):
    """Smooth the data using a window with requested size."""
    if isinstance(df, pd.Series):
        new_df = _smooth(df, window_len=window_len, window=window)
    else:
        new_df = df.apply(_smooth, window_len=window_len, window=window)
    return new_df
python
def smooth(df, window_len=11, window="hanning"):
    """Smooth the data using a window with requested size."""
    if isinstance(df, pd.Series):
        new_df = _smooth(df, window_len=window_len, window=window)
    else:
        new_df = df.apply(_smooth, window_len=window_len, window=window)
    return new_df
Smooth the data using a window with requested size.
https://github.com/pyoceans/python-ctd/blob/fa9a9d02da3dfed6d1d60db0e52bbab52adfe666/ctd/processing.py#L216-L222
mayhewj/greenstalk
greenstalk.py
Client.put
def put(self,
        body: Body,
        priority: int = DEFAULT_PRIORITY,
        delay: int = DEFAULT_DELAY,
        ttr: int = DEFAULT_TTR) -> int:
    """Inserts a job into the currently used tube and returns the job ID.

    :param body: The data representing the job.
    :param priority: An integer between 0 and 4,294,967,295 where 0 is the
                     most urgent.
    :param delay: The number of seconds to delay the job for.
    :param ttr: The maximum number of seconds the job can be reserved for
                before timing out.
    """
    if isinstance(body, str):
        if self.encoding is None:
            raise TypeError("Unable to encode string with no encoding set")
        body = body.encode(self.encoding)
    cmd = b'put %d %d %d %d\r\n%b' % (priority, delay, ttr, len(body), body)
    return self._int_cmd(cmd, b'INSERTED')
python
def put(self,
        body: Body,
        priority: int = DEFAULT_PRIORITY,
        delay: int = DEFAULT_DELAY,
        ttr: int = DEFAULT_TTR) -> int:
    """Inserts a job into the currently used tube and returns the job ID.

    :param body: The data representing the job.
    :param priority: An integer between 0 and 4,294,967,295 where 0 is the
                     most urgent.
    :param delay: The number of seconds to delay the job for.
    :param ttr: The maximum number of seconds the job can be reserved for
                before timing out.
    """
    if isinstance(body, str):
        if self.encoding is None:
            raise TypeError("Unable to encode string with no encoding set")
        body = body.encode(self.encoding)
    cmd = b'put %d %d %d %d\r\n%b' % (priority, delay, ttr, len(body), body)
    return self._int_cmd(cmd, b'INSERTED')
Inserts a job into the currently used tube and returns the job ID.

:param body: The data representing the job.
:param priority: An integer between 0 and 4,294,967,295 where 0 is the
                 most urgent.
:param delay: The number of seconds to delay the job for.
:param ttr: The maximum number of seconds the job can be reserved for
            before timing out.
https://github.com/mayhewj/greenstalk/blob/765a5e7321a101a08e400a66e88df06c57406f58/greenstalk.py#L199-L218
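A hedged round-trip sketch for the client: it assumes a beanstalkd server on localhost:11300 and the address-tuple constructor used by recent greenstalk releases (neither is shown in the excerpts here). Producers put into the used tube while consumers reserve from watched tubes, so the example watches the same tube it uses:

import greenstalk

client = greenstalk.Client(('127.0.0.1', 11300))  # assumed constructor
client.use('emails')      # where put() inserts
client.watch('emails')    # where reserve() looks
job_id = client.put('send welcome email')
job = client.reserve(timeout=5)
print(job.id, job.body)
client.delete(job)
client.close()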
mayhewj/greenstalk
greenstalk.py
Client.use
def use(self, tube: str) -> None:
    """Changes the currently used tube.

    :param tube: The tube to use.
    """
    self._send_cmd(b'use %b' % tube.encode('ascii'), b'USING')
python
def use(self, tube: str) -> None:
    """Changes the currently used tube.

    :param tube: The tube to use.
    """
    self._send_cmd(b'use %b' % tube.encode('ascii'), b'USING')
Changes the currently used tube.

:param tube: The tube to use.
https://github.com/mayhewj/greenstalk/blob/765a5e7321a101a08e400a66e88df06c57406f58/greenstalk.py#L220-L225
mayhewj/greenstalk
greenstalk.py
Client.reserve
def reserve(self, timeout: Optional[int] = None) -> Job:
    """Reserves a job from a tube on the watch list, giving this client
    exclusive access to it for the TTR. Returns the reserved job.

    This blocks until a job is reserved unless a ``timeout`` is given,
    which will raise a :class:`TimedOutError <greenstalk.TimedOutError>` if
    a job cannot be reserved within that time.

    :param timeout: The maximum number of seconds to wait.
    """
    if timeout is None:
        cmd = b'reserve'
    else:
        cmd = b'reserve-with-timeout %d' % timeout
    return self._job_cmd(cmd, b'RESERVED')
python
def reserve(self, timeout: Optional[int] = None) -> Job:
    """Reserves a job from a tube on the watch list, giving this client
    exclusive access to it for the TTR. Returns the reserved job.

    This blocks until a job is reserved unless a ``timeout`` is given,
    which will raise a :class:`TimedOutError <greenstalk.TimedOutError>` if
    a job cannot be reserved within that time.

    :param timeout: The maximum number of seconds to wait.
    """
    if timeout is None:
        cmd = b'reserve'
    else:
        cmd = b'reserve-with-timeout %d' % timeout
    return self._job_cmd(cmd, b'RESERVED')
Reserves a job from a tube on the watch list, giving this client
exclusive access to it for the TTR. Returns the reserved job.

This blocks until a job is reserved unless a ``timeout`` is given,
which will raise a :class:`TimedOutError <greenstalk.TimedOutError>` if
a job cannot be reserved within that time.

:param timeout: The maximum number of seconds to wait.
https://github.com/mayhewj/greenstalk/blob/765a5e7321a101a08e400a66e88df06c57406f58/greenstalk.py#L227-L241
mayhewj/greenstalk
greenstalk.py
Client.delete
def delete(self, job: JobOrID) -> None:
    """Deletes a job.

    :param job: The job or job ID to delete.
    """
    self._send_cmd(b'delete %d' % _to_id(job), b'DELETED')
python
def delete(self, job: JobOrID) -> None:
    """Deletes a job.

    :param job: The job or job ID to delete.
    """
    self._send_cmd(b'delete %d' % _to_id(job), b'DELETED')
Deletes a job.

:param job: The job or job ID to delete.
https://github.com/mayhewj/greenstalk/blob/765a5e7321a101a08e400a66e88df06c57406f58/greenstalk.py#L243-L248
mayhewj/greenstalk
greenstalk.py
Client.release
def release(self,
            job: Job,
            priority: int = DEFAULT_PRIORITY,
            delay: int = DEFAULT_DELAY) -> None:
    """Releases a reserved job.

    :param job: The job to release.
    :param priority: An integer between 0 and 4,294,967,295 where 0 is the
                     most urgent.
    :param delay: The number of seconds to delay the job for.
    """
    self._send_cmd(b'release %d %d %d' % (job.id, priority, delay), b'RELEASED')
python
def release(self,
            job: Job,
            priority: int = DEFAULT_PRIORITY,
            delay: int = DEFAULT_DELAY) -> None:
    """Releases a reserved job.

    :param job: The job to release.
    :param priority: An integer between 0 and 4,294,967,295 where 0 is the
                     most urgent.
    :param delay: The number of seconds to delay the job for.
    """
    self._send_cmd(b'release %d %d %d' % (job.id, priority, delay), b'RELEASED')
Releases a reserved job.

:param job: The job to release.
:param priority: An integer between 0 and 4,294,967,295 where 0 is the
                 most urgent.
:param delay: The number of seconds to delay the job for.
https://github.com/mayhewj/greenstalk/blob/765a5e7321a101a08e400a66e88df06c57406f58/greenstalk.py#L250-L261
mayhewj/greenstalk
greenstalk.py
Client.bury
def bury(self, job: Job, priority: int = DEFAULT_PRIORITY) -> None:
    """Buries a reserved job.

    :param job: The job to bury.
    :param priority: An integer between 0 and 4,294,967,295 where 0 is the
                     most urgent.
    """
    self._send_cmd(b'bury %d %d' % (job.id, priority), b'BURIED')
python
def bury(self, job: Job, priority: int = DEFAULT_PRIORITY) -> None:
    """Buries a reserved job.

    :param job: The job to bury.
    :param priority: An integer between 0 and 4,294,967,295 where 0 is the
                     most urgent.
    """
    self._send_cmd(b'bury %d %d' % (job.id, priority), b'BURIED')
Buries a reserved job.

:param job: The job to bury.
:param priority: An integer between 0 and 4,294,967,295 where 0 is the
                 most urgent.
https://github.com/mayhewj/greenstalk/blob/765a5e7321a101a08e400a66e88df06c57406f58/greenstalk.py#L263-L270
mayhewj/greenstalk
greenstalk.py
Client.touch
def touch(self, job: Job) -> None:
    """Refreshes the TTR of a reserved job.

    :param job: The job to touch.
    """
    self._send_cmd(b'touch %d' % job.id, b'TOUCHED')
python
def touch(self, job: Job) -> None:
    """Refreshes the TTR of a reserved job.

    :param job: The job to touch.
    """
    self._send_cmd(b'touch %d' % job.id, b'TOUCHED')
Refreshes the TTR of a reserved job.

:param job: The job to touch.
https://github.com/mayhewj/greenstalk/blob/765a5e7321a101a08e400a66e88df06c57406f58/greenstalk.py#L272-L277
mayhewj/greenstalk
greenstalk.py
Client.watch
def watch(self, tube: str) -> int:
    """Adds a tube to the watch list. Returns the number of tubes this
    client is watching.

    :param tube: The tube to watch.
    """
    return self._int_cmd(b'watch %b' % tube.encode('ascii'), b'WATCHING')
python
def watch(self, tube: str) -> int:
    """Adds a tube to the watch list. Returns the number of tubes this
    client is watching.

    :param tube: The tube to watch.
    """
    return self._int_cmd(b'watch %b' % tube.encode('ascii'), b'WATCHING')
Adds a tube to the watch list. Returns the number of tubes this
client is watching.

:param tube: The tube to watch.
https://github.com/mayhewj/greenstalk/blob/765a5e7321a101a08e400a66e88df06c57406f58/greenstalk.py#L279-L285
mayhewj/greenstalk
greenstalk.py
Client.ignore
def ignore(self, tube: str) -> int:
    """Removes a tube from the watch list. Returns the number of tubes this
    client is watching.

    :param tube: The tube to ignore.
    """
    return self._int_cmd(b'ignore %b' % tube.encode('ascii'), b'WATCHING')
python
def ignore(self, tube: str) -> int:
    """Removes a tube from the watch list. Returns the number of tubes this
    client is watching.

    :param tube: The tube to ignore.
    """
    return self._int_cmd(b'ignore %b' % tube.encode('ascii'), b'WATCHING')
Removes a tube from the watch list. Returns the number of tubes this
client is watching.

:param tube: The tube to ignore.
https://github.com/mayhewj/greenstalk/blob/765a5e7321a101a08e400a66e88df06c57406f58/greenstalk.py#L287-L293
mayhewj/greenstalk
greenstalk.py
Client.kick
def kick(self, bound: int) -> int:
    """Moves delayed and buried jobs into the ready queue and returns the
    number of jobs affected.

    Only jobs from the currently used tube are moved.

    A kick will only move jobs in a single state. If there are any buried
    jobs, only those will be moved. Otherwise delayed jobs will be moved.

    :param bound: The maximum number of jobs to kick.
    """
    return self._int_cmd(b'kick %d' % bound, b'KICKED')
python
def kick(self, bound: int) -> int:
    """Moves delayed and buried jobs into the ready queue and returns the
    number of jobs affected.

    Only jobs from the currently used tube are moved.

    A kick will only move jobs in a single state. If there are any buried
    jobs, only those will be moved. Otherwise delayed jobs will be moved.

    :param bound: The maximum number of jobs to kick.
    """
    return self._int_cmd(b'kick %d' % bound, b'KICKED')
Moves delayed and buried jobs into the ready queue and returns the
number of jobs affected.

Only jobs from the currently used tube are moved.

A kick will only move jobs in a single state. If there are any buried
jobs, only those will be moved. Otherwise delayed jobs will be moved.

:param bound: The maximum number of jobs to kick.
https://github.com/mayhewj/greenstalk/blob/765a5e7321a101a08e400a66e88df06c57406f58/greenstalk.py#L314-L325
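A sketch of kick, assuming buried jobs have accumulated in the currently used tube; the bound of 100 and the client construction are illustrative assumptions.

import greenstalk

client = greenstalk.Client('127.0.0.1', 11300)  # constructor args assumed

# Buried jobs move first; delayed jobs only move if nothing is buried.
moved = client.kick(100)
print('moved %d job(s) back to the ready queue' % moved)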
mayhewj/greenstalk
greenstalk.py
Client.kick_job
def kick_job(self, job: JobOrID) -> None:
    """Moves a delayed or buried job into the ready queue.

    :param job: The job or job ID to kick.
    """
    self._send_cmd(b'kick-job %d' % _to_id(job), b'KICKED')
python
def kick_job(self, job: JobOrID) -> None:
    """Moves a delayed or buried job into the ready queue.

    :param job: The job or job ID to kick.
    """
    self._send_cmd(b'kick-job %d' % _to_id(job), b'KICKED')
Moves a delayed or buried job into the ready queue.

:param job: The job or job ID to kick.
https://github.com/mayhewj/greenstalk/blob/765a5e7321a101a08e400a66e88df06c57406f58/greenstalk.py#L327-L332
mayhewj/greenstalk
greenstalk.py
Client.stats_job
def stats_job(self, job: JobOrID) -> Stats:
    """Returns job statistics.

    :param job: The job or job ID to return statistics for.
    """
    return self._stats_cmd(b'stats-job %d' % _to_id(job))
python
def stats_job(self, job: JobOrID) -> Stats:
    """Returns job statistics.

    :param job: The job or job ID to return statistics for.
    """
    return self._stats_cmd(b'stats-job %d' % _to_id(job))
Returns job statistics.

:param job: The job or job ID to return statistics for.
https://github.com/mayhewj/greenstalk/blob/765a5e7321a101a08e400a66e88df06c57406f58/greenstalk.py#L334-L339
mayhewj/greenstalk
greenstalk.py
Client.stats_tube
def stats_tube(self, tube: str) -> Stats:
    """Returns tube statistics.

    :param tube: The tube to return statistics for.
    """
    return self._stats_cmd(b'stats-tube %b' % tube.encode('ascii'))
python
def stats_tube(self, tube: str) -> Stats:
    """Returns tube statistics.

    :param tube: The tube to return statistics for.
    """
    return self._stats_cmd(b'stats-tube %b' % tube.encode('ascii'))
Returns tube statistics.

:param tube: The tube to return statistics for.
https://github.com/mayhewj/greenstalk/blob/765a5e7321a101a08e400a66e88df06c57406f58/greenstalk.py#L341-L346
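stats_job and stats_tube both return a parsed mapping of the server's YAML stats. A sketch: the keys shown ('state', 'current-jobs-ready') are standard beanstalkd stats fields, and the client construction is assumed as in the earlier sketches.

import greenstalk

client = greenstalk.Client('127.0.0.1', 11300)  # constructor args assumed

job = client.reserve()                   # blocks until a job is ready
job_stats = client.stats_job(job)
tube_stats = client.stats_tube('default')
print(job_stats['state'])                # e.g. 'reserved'
print(tube_stats['current-jobs-ready'])  # jobs waiting in the tube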
mayhewj/greenstalk
greenstalk.py
Client.pause_tube
def pause_tube(self, tube: str, delay: int) -> None:
    """Prevents jobs from being reserved from a tube for a period of time.

    :param tube: The tube to pause.
    :param delay: The number of seconds to pause the tube for.
    """
    self._send_cmd(b'pause-tube %b %d' % (tube.encode('ascii'), delay),
                   b'PAUSED')
python
def pause_tube(self, tube: str, delay: int) -> None:
    """Prevents jobs from being reserved from a tube for a period of time.

    :param tube: The tube to pause.
    :param delay: The number of seconds to pause the tube for.
    """
    self._send_cmd(b'pause-tube %b %d' % (tube.encode('ascii'), delay),
                   b'PAUSED')
Prevents jobs from being reserved from a tube for a period of time.

:param tube: The tube to pause.
:param delay: The number of seconds to pause the tube for.
https://github.com/mayhewj/greenstalk/blob/765a5e7321a101a08e400a66e88df06c57406f58/greenstalk.py#L352-L358
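pause_tube is useful for backing off an entire tube, for example while a downstream outage clears. A sketch with an illustrative tube name and delay, client construction assumed as above:

import greenstalk

client = greenstalk.Client('127.0.0.1', 11300)  # constructor args assumed

# No job in 'emails' can be reserved for the next 60 seconds; workers
# blocked in reserve() simply wait until the pause elapses.
client.pause_tube('emails', 60)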
fedora-infra/fedora-messaging
fedora_messaging/cli.py
cli
def cli(conf): """The fedora-messaging command line interface.""" if conf: if not os.path.isfile(conf): raise click.exceptions.BadParameter("{} is not a file".format(conf)) try: config.conf.load_config(config_path=conf) except exceptions.ConfigurationException as e: raise click.exceptions.BadParameter(str(e)) twisted_observer = legacy_twisted_log.PythonLoggingObserver() twisted_observer.start() config.conf.setup_logging()
python
def cli(conf): """The fedora-messaging command line interface.""" if conf: if not os.path.isfile(conf): raise click.exceptions.BadParameter("{} is not a file".format(conf)) try: config.conf.load_config(config_path=conf) except exceptions.ConfigurationException as e: raise click.exceptions.BadParameter(str(e)) twisted_observer = legacy_twisted_log.PythonLoggingObserver() twisted_observer.start() config.conf.setup_logging()
The fedora-messaging command line interface.
https://github.com/fedora-infra/fedora-messaging/blob/be3e88534e2b15d579bcd24f9c4b7e795cb7e0b7/fedora_messaging/cli.py#L78-L89
fedora-infra/fedora-messaging
fedora_messaging/cli.py
consume
def consume(exchange, queue_name, routing_key, callback, app_name):
    """Consume messages from an AMQP queue using a Python callback."""
    # The configuration validates these are not null and contain all required keys
    # when it is loaded.
    bindings = config.conf["bindings"]
    queues = config.conf["queues"]

    # The CLI and config.DEFAULTS have different defaults for the queue
    # settings at the moment. We should select a universal default in the
    # future and remove this. Unfortunately that will break backwards compatibility.
    if queues == config.DEFAULTS["queues"]:
        queues[config._default_queue_name]["durable"] = True
        queues[config._default_queue_name]["auto_delete"] = False

    if queue_name:
        queues = {queue_name: config.conf["queues"][config._default_queue_name]}
        for binding in bindings:
            binding["queue"] = queue_name

    if exchange:
        for binding in bindings:
            binding["exchange"] = exchange

    if routing_key:
        for binding in bindings:
            binding["routing_keys"] = routing_key

    callback_path = callback or config.conf["callback"]
    if not callback_path:
        raise click.ClickException(
            "A Python path to a callable object that accepts the message must be provided"
            ' with the "--callback" command line option or in the configuration file'
        )

    try:
        module, cls = callback_path.strip().split(":")
    except ValueError:
        raise click.ClickException(
            "Unable to parse the callback path ({}); the "
            'expected format is "my_package.module:'
            'callable_object"'.format(callback_path)
        )

    try:
        module = importlib.import_module(module)
    except ImportError as e:
        provider = "--callback argument" if callback else "configuration file"
        raise click.ClickException(
            "Failed to import the callback module ({}) provided in the {}".format(
                str(e), provider
            )
        )

    try:
        callback = getattr(module, cls)
    except AttributeError as e:
        raise click.ClickException(
            "Unable to import {} ({}); is the package installed? The python path should "
            'be in the format "my_package.module:callable_object"'.format(
                callback_path, str(e)
            )
        )

    if app_name:
        config.conf["client_properties"]["app"] = app_name

    _log.info("Starting consumer with %s callback", callback_path)
    try:
        deferred_consumers = api.twisted_consume(
            callback, bindings=bindings, queues=queues
        )
        deferred_consumers.addCallback(_consume_callback)
        deferred_consumers.addErrback(_consume_errback)
    except ValueError as e:
        click_version = pkg_resources.get_distribution("click").parsed_version
        if click_version < pkg_resources.parse_version("7.0"):
            raise click.exceptions.BadOptionUsage(str(e))
        else:
            raise click.exceptions.BadOptionUsage("callback", str(e))

    reactor.run()
    sys.exit(_exit_code)
python
def consume(exchange, queue_name, routing_key, callback, app_name):
    """Consume messages from an AMQP queue using a Python callback."""
    # The configuration validates these are not null and contain all required keys
    # when it is loaded.
    bindings = config.conf["bindings"]
    queues = config.conf["queues"]

    # The CLI and config.DEFAULTS have different defaults for the queue
    # settings at the moment. We should select a universal default in the
    # future and remove this. Unfortunately that will break backwards compatibility.
    if queues == config.DEFAULTS["queues"]:
        queues[config._default_queue_name]["durable"] = True
        queues[config._default_queue_name]["auto_delete"] = False

    if queue_name:
        queues = {queue_name: config.conf["queues"][config._default_queue_name]}
        for binding in bindings:
            binding["queue"] = queue_name

    if exchange:
        for binding in bindings:
            binding["exchange"] = exchange

    if routing_key:
        for binding in bindings:
            binding["routing_keys"] = routing_key

    callback_path = callback or config.conf["callback"]
    if not callback_path:
        raise click.ClickException(
            "A Python path to a callable object that accepts the message must be provided"
            ' with the "--callback" command line option or in the configuration file'
        )

    try:
        module, cls = callback_path.strip().split(":")
    except ValueError:
        raise click.ClickException(
            "Unable to parse the callback path ({}); the "
            'expected format is "my_package.module:'
            'callable_object"'.format(callback_path)
        )

    try:
        module = importlib.import_module(module)
    except ImportError as e:
        provider = "--callback argument" if callback else "configuration file"
        raise click.ClickException(
            "Failed to import the callback module ({}) provided in the {}".format(
                str(e), provider
            )
        )

    try:
        callback = getattr(module, cls)
    except AttributeError as e:
        raise click.ClickException(
            "Unable to import {} ({}); is the package installed? The python path should "
            'be in the format "my_package.module:callable_object"'.format(
                callback_path, str(e)
            )
        )

    if app_name:
        config.conf["client_properties"]["app"] = app_name

    _log.info("Starting consumer with %s callback", callback_path)
    try:
        deferred_consumers = api.twisted_consume(
            callback, bindings=bindings, queues=queues
        )
        deferred_consumers.addCallback(_consume_callback)
        deferred_consumers.addErrback(_consume_errback)
    except ValueError as e:
        click_version = pkg_resources.get_distribution("click").parsed_version
        if click_version < pkg_resources.parse_version("7.0"):
            raise click.exceptions.BadOptionUsage(str(e))
        else:
            raise click.exceptions.BadOptionUsage("callback", str(e))

    reactor.run()
    sys.exit(_exit_code)
Consume messages from an AMQP queue using a Python callback.
https://github.com/fedora-infra/fedora-messaging/blob/be3e88534e2b15d579bcd24f9c4b7e795cb7e0b7/fedora_messaging/cli.py#L98-L178
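The "my_package.module:callable_object" format that consume parses maps onto a module-level callable like the sketch below. The module name is hypothetical; the callback contract (a single message argument) follows fedora-messaging's documented consumer interface.

# my_package/module.py -- hypothetical module referenced by the callback path
from fedora_messaging import message

def callable_object(msg: message.Message) -> None:
    """Handle one AMQP message; raising exceptions.Nack would requeue it."""
    print('received on topic %s: %r' % (msg.topic, msg.body))

It would then be selected with the --callback="my_package.module:callable_object" command line option, or via the callback key in the configuration file, as the error messages above describe.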
fedora-infra/fedora-messaging
fedora_messaging/cli.py
_consume_errback
def _consume_errback(failure):
    """Handle any errors that occur during consumer registration."""
    global _exit_code
    if failure.check(exceptions.BadDeclaration):
        _log.error(
            "Unable to declare the %s object on the AMQP broker. The "
            "broker responded with %s. Check permissions for your user.",
            failure.value.obj_type,
            failure.value.reason,
        )
        _exit_code = 10
    elif failure.check(exceptions.PermissionException):
        _exit_code = 15
        _log.error(
            "The consumer could not proceed because of a permissions problem: %s",
            str(failure.value),
        )
    elif failure.check(exceptions.ConnectionException):
        _exit_code = 14
        _log.error(failure.value.reason)
    else:
        _exit_code = 11
        _log.exception(
            "An unexpected error (%r) occurred while registering the "
            "consumer, please report this bug.",
            failure.value,
        )
    try:
        reactor.stop()
    except error.ReactorNotRunning:
        pass
python
def _consume_errback(failure):
    """Handle any errors that occur during consumer registration."""
    global _exit_code
    if failure.check(exceptions.BadDeclaration):
        _log.error(
            "Unable to declare the %s object on the AMQP broker. The "
            "broker responded with %s. Check permissions for your user.",
            failure.value.obj_type,
            failure.value.reason,
        )
        _exit_code = 10
    elif failure.check(exceptions.PermissionException):
        _exit_code = 15
        _log.error(
            "The consumer could not proceed because of a permissions problem: %s",
            str(failure.value),
        )
    elif failure.check(exceptions.ConnectionException):
        _exit_code = 14
        _log.error(failure.value.reason)
    else:
        _exit_code = 11
        _log.exception(
            "An unexpected error (%r) occurred while registering the "
            "consumer, please report this bug.",
            failure.value,
        )
    try:
        reactor.stop()
    except error.ReactorNotRunning:
        pass
Handle any errors that occur during consumer registration.
https://github.com/fedora-infra/fedora-messaging/blob/be3e88534e2b15d579bcd24f9c4b7e795cb7e0b7/fedora_messaging/cli.py#L181-L211
fedora-infra/fedora-messaging
fedora_messaging/cli.py
_consume_callback
def _consume_callback(consumers):
    """
    Callback when consumers are successfully registered.

    This simply registers callbacks for the consumer.result deferred object,
    which fires when the consumer stops.

    Args:
        consumers (list of fedora_messaging.api.Consumer):
            The list of consumers that were successfully created.
    """
    for consumer in consumers:

        def errback(failure):
            global _exit_code
            if failure.check(exceptions.HaltConsumer):
                _exit_code = failure.value.exit_code
                if _exit_code:
                    _log.error(
                        "Consumer halted with non-zero exit code (%d): %s",
                        _exit_code,
                        str(failure.value.reason),
                    )
            elif failure.check(exceptions.ConsumerCanceled):
                _exit_code = 12
                _log.error(
                    "The consumer was canceled server-side, check with system administrators."
                )
            elif failure.check(exceptions.PermissionException):
                _exit_code = 15
                _log.error(
                    "The consumer could not proceed because of a permissions problem: %s",
                    str(failure.value),
                )
            else:
                _exit_code = 13
                _log.error(
                    "Unexpected error occurred in consumer %r: %r", consumer, failure
                )
            try:
                reactor.stop()
            except error.ReactorNotRunning:
                pass

        def callback(consumer):
            _log.info("The %r consumer halted.", consumer)
            if all([c.result.called for c in consumers]):
                _log.info("All consumers have stopped; shutting down.")
                try:
                    # Last consumer out shuts off the lights
                    reactor.stop()
                except error.ReactorNotRunning:
                    pass

        consumer.result.addCallbacks(callback, errback)
python
def _consume_callback(consumers):
    """
    Callback when consumers are successfully registered.

    This simply registers callbacks for the consumer.result deferred object,
    which fires when the consumer stops.

    Args:
        consumers (list of fedora_messaging.api.Consumer):
            The list of consumers that were successfully created.
    """
    for consumer in consumers:

        def errback(failure):
            global _exit_code
            if failure.check(exceptions.HaltConsumer):
                _exit_code = failure.value.exit_code
                if _exit_code:
                    _log.error(
                        "Consumer halted with non-zero exit code (%d): %s",
                        _exit_code,
                        str(failure.value.reason),
                    )
            elif failure.check(exceptions.ConsumerCanceled):
                _exit_code = 12
                _log.error(
                    "The consumer was canceled server-side, check with system administrators."
                )
            elif failure.check(exceptions.PermissionException):
                _exit_code = 15
                _log.error(
                    "The consumer could not proceed because of a permissions problem: %s",
                    str(failure.value),
                )
            else:
                _exit_code = 13
                _log.error(
                    "Unexpected error occurred in consumer %r: %r", consumer, failure
                )
            try:
                reactor.stop()
            except error.ReactorNotRunning:
                pass

        def callback(consumer):
            _log.info("The %r consumer halted.", consumer)
            if all([c.result.called for c in consumers]):
                _log.info("All consumers have stopped; shutting down.")
                try:
                    # Last consumer out shuts off the lights
                    reactor.stop()
                except error.ReactorNotRunning:
                    pass

        consumer.result.addCallbacks(callback, errback)
Callback when consumers are successfully registered.

This simply registers callbacks for the consumer.result deferred object,
which fires when the consumer stops.

Args:
    consumers (list of fedora_messaging.api.Consumer):
        The list of consumers that were successfully created.
https://github.com/fedora-infra/fedora-messaging/blob/be3e88534e2b15d579bcd24f9c4b7e795cb7e0b7/fedora_messaging/cli.py#L214-L268
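The structure above is the standard Twisted result/failure split: each consumer's result deferred gets one success path and one failure-classification path. A stand-alone sketch of that pattern (no AMQP involved; the handlers are illustrative):

from twisted.internet import defer

def on_halt(result):
    print('consumer halted cleanly')

def on_error(failure):
    # Classify the failure and map it to an exit code, as errback() above
    # does with HaltConsumer, ConsumerCanceled, and PermissionException.
    print('consumer failed with %s' % failure.type.__name__)

d = defer.Deferred()
d.addCallbacks(on_halt, on_error)
d.callback(None)  # fires on_halt; d.errback(Failure(...)) would fire on_error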
fedora-infra/fedora-messaging
fedora_messaging/twisted/protocol.py
_add_timeout
def _add_timeout(deferred, timeout):
    """
    Add a timeout to the given deferred.

    This is designed to work with both old Twisted and versions of Twisted
    with the addTimeout API. This is exclusively to support EL7.

    The deferred will errback with a :class:`defer.CancelledError` if the
    version of Twisted being used doesn't have the
    ``defer.Deferred.addTimeout`` API, otherwise it will errback with the
    normal ``error.TimeoutError``.
    """
    try:
        deferred.addTimeout(timeout, reactor)
    except AttributeError:
        # Twisted 12.2 (in EL7) does not have the addTimeout API, so make do with
        # the slightly more annoying approach of scheduling a call to cancel which
        # is then canceled if the deferred succeeds before the timeout is up.
        delayed_cancel = reactor.callLater(timeout, deferred.cancel)

        def cancel_cancel_call(result):
            """Halt the delayed call to cancel if the deferred fires before the timeout."""
            if not delayed_cancel.called:
                delayed_cancel.cancel()
            return result

        deferred.addBoth(cancel_cancel_call)
python
def _add_timeout(deferred, timeout):
    """
    Add a timeout to the given deferred.

    This is designed to work with both old Twisted and versions of Twisted
    with the addTimeout API. This is exclusively to support EL7.

    The deferred will errback with a :class:`defer.CancelledError` if the
    version of Twisted being used doesn't have the
    ``defer.Deferred.addTimeout`` API, otherwise it will errback with the
    normal ``error.TimeoutError``.
    """
    try:
        deferred.addTimeout(timeout, reactor)
    except AttributeError:
        # Twisted 12.2 (in EL7) does not have the addTimeout API, so make do with
        # the slightly more annoying approach of scheduling a call to cancel which
        # is then canceled if the deferred succeeds before the timeout is up.
        delayed_cancel = reactor.callLater(timeout, deferred.cancel)

        def cancel_cancel_call(result):
            """Halt the delayed call to cancel if the deferred fires before the timeout."""
            if not delayed_cancel.called:
                delayed_cancel.cancel()
            return result

        deferred.addBoth(cancel_cancel_call)
Add a timeout to the given deferred.

This is designed to work with both old Twisted and versions of Twisted
with the addTimeout API. This is exclusively to support EL7.

The deferred will errback with a :class:`defer.CancelledError` if the
version of Twisted being used doesn't have the
``defer.Deferred.addTimeout`` API, otherwise it will errback with the
normal ``error.TimeoutError``.
https://github.com/fedora-infra/fedora-messaging/blob/be3e88534e2b15d579bcd24f9c4b7e795cb7e0b7/fedora_messaging/twisted/protocol.py#L72-L97
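A usage sketch for _add_timeout. It is a private helper, imported here only for illustration; the errback traps several error types because, as the docstring notes, the exception class depends on the Twisted version in use.

from twisted.internet import defer, error, reactor

from fedora_messaging.twisted.protocol import _add_timeout  # private helper

d = defer.Deferred()
_add_timeout(d, 1)  # errback after ~1 second if d has not fired

def on_timeout(failure):
    # CancelledError on old Twisted (manual cancel path); TimeoutError on
    # Twisted with the addTimeout API.
    failure.trap(defer.CancelledError, defer.TimeoutError, error.TimeoutError)
    print('timed out')

d.addErrback(on_timeout)
reactor.callLater(2, reactor.stop)
reactor.run()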
fedora-infra/fedora-messaging
fedora_messaging/twisted/protocol.py
FedoraMessagingProtocolV2._allocate_channel
def _allocate_channel(self):
    """
    Allocate a new AMQP channel.

    Raises:
        NoFreeChannels: If this connection has reached its maximum number of channels.
    """
    try:
        channel = yield self.channel()
    except pika.exceptions.NoFreeChannels:
        raise NoFreeChannels()
    _std_log.debug("Created AMQP channel id %d", channel.channel_number)
    if self._confirms:
        yield channel.confirm_delivery()
    defer.returnValue(channel)
python
def _allocate_channel(self):
    """
    Allocate a new AMQP channel.

    Raises:
        NoFreeChannels: If this connection has reached its maximum number of channels.
    """
    try:
        channel = yield self.channel()
    except pika.exceptions.NoFreeChannels:
        raise NoFreeChannels()
    _std_log.debug("Created AMQP channel id %d", channel.channel_number)
    if self._confirms:
        yield channel.confirm_delivery()
    defer.returnValue(channel)
Allocate a new AMQP channel.

Raises:
    NoFreeChannels: If this connection has reached its maximum number of channels.
https://github.com/fedora-infra/fedora-messaging/blob/be3e88534e2b15d579bcd24f9c4b7e795cb7e0b7/fedora_messaging/twisted/protocol.py#L131-L145
fedora-infra/fedora-messaging
fedora_messaging/twisted/protocol.py
FedoraMessagingProtocolV2.connectionReady
def connectionReady(self, res=None):
    """
    Callback invoked when the AMQP connection is ready (when self.ready fires).

    This API is not meant for users.

    Args:
        res: This is an unused argument that provides compatibility with
            Pika versions lower than 1.0.0.
    """
    self._channel = yield self._allocate_channel()
    if _pika_version < pkg_resources.parse_version("1.0.0b1"):
        extra_args = dict(all_channels=True)
    else:
        extra_args = dict(global_qos=True)
    yield self._channel.basic_qos(
        prefetch_count=config.conf["qos"]["prefetch_count"],
        prefetch_size=config.conf["qos"]["prefetch_size"],
        **extra_args
    )
    if _pika_version < pkg_resources.parse_version("1.0.0b1"):
        TwistedProtocolConnection.connectionReady(self, res)
python
def connectionReady(self, res=None):
    """
    Callback invoked when the AMQP connection is ready (when self.ready fires).

    This API is not meant for users.

    Args:
        res: This is an unused argument that provides compatibility with
            Pika versions lower than 1.0.0.
    """
    self._channel = yield self._allocate_channel()
    if _pika_version < pkg_resources.parse_version("1.0.0b1"):
        extra_args = dict(all_channels=True)
    else:
        extra_args = dict(global_qos=True)
    yield self._channel.basic_qos(
        prefetch_count=config.conf["qos"]["prefetch_count"],
        prefetch_size=config.conf["qos"]["prefetch_size"],
        **extra_args
    )
    if _pika_version < pkg_resources.parse_version("1.0.0b1"):
        TwistedProtocolConnection.connectionReady(self, res)
Callback invoked when the AMQP connection is ready (when self.ready fires).

This API is not meant for users.

Args:
    res: This is an unused argument that provides compatibility with
        Pika versions lower than 1.0.0.
https://github.com/fedora-infra/fedora-messaging/blob/be3e88534e2b15d579bcd24f9c4b7e795cb7e0b7/fedora_messaging/twisted/protocol.py#L148-L169
fedora-infra/fedora-messaging
fedora_messaging/twisted/protocol.py
FedoraMessagingProtocolV2._read
def _read(self, queue_object, consumer):
    """
    The loop that reads from the message queue and calls the consumer callback
    wrapper.

    Serialized Processing
    ---------------------
    This loop processes messages serially. This is because a second
    ``queue_object.get()`` operation can only occur after the Deferred from
    ``self._on_message`` completes. Thus, we can be sure that callbacks
    never run concurrently in two different threads.

    This is done rather than saturating the Twisted thread pool as the
    documentation for callbacks (in fedmsg and here) has never indicated
    that they are not thread-safe. In the future we can add a flag for
    users who are confident in their ability to write thread-safe code.

    Gracefully Halting
    ------------------
    This is a loop that only exits when the consumer._running variable is
    set to False. The call to cancel will set this to false, as will the
    call to :meth:`pauseProducing`. These calls will then wait for the
    Deferred from this function to call back in order to ensure the
    message finishes processing.

    The Deferred object only completes when this method returns, so we
    need to periodically check the status of consumer._running. That's
    why there's a short timeout on the call to ``queue_object.get``.

    Args:
        queue_object (pika.adapters.twisted_connection.ClosableDeferredQueue):
            The AMQP queue the consumer is bound to.
        consumer (dict): A dictionary describing the consumer for the given
            queue_object.
    """
    while consumer._running:
        try:
            deferred_get = queue_object.get()
            _add_timeout(deferred_get, 1)
            channel, delivery_frame, properties, body = yield deferred_get
        except (defer.TimeoutError, defer.CancelledError):
            continue

        _std_log.debug(
            "Message arrived with delivery tag %s for %r",
            delivery_frame.delivery_tag,
            consumer._tag,
        )
        try:
            message = get_message(delivery_frame.routing_key, properties, body)
            message.queue = consumer.queue
        except ValidationError:
            _std_log.warning(
                "Message id %s did not pass validation; ignoring message",
                properties.message_id,
            )
            yield channel.basic_nack(
                delivery_tag=delivery_frame.delivery_tag, requeue=False
            )
            continue

        try:
            _std_log.info(
                "Consuming message from topic %s (message id %s)",
                message.topic,
                properties.message_id,
            )
            yield threads.deferToThread(consumer.callback, message)
        except Nack:
            _std_log.warning(
                "Returning message id %s to the queue", properties.message_id
            )
            yield channel.basic_nack(
                delivery_tag=delivery_frame.delivery_tag, requeue=True
            )
        except Drop:
            _std_log.warning(
                "Consumer requested message id %s be dropped", properties.message_id
            )
            yield channel.basic_nack(
                delivery_tag=delivery_frame.delivery_tag, requeue=False
            )
        except HaltConsumer as e:
            _std_log.info(
                "Consumer indicated it wishes consumption to halt, shutting down"
            )
            if e.requeue:
                yield channel.basic_nack(
                    delivery_tag=delivery_frame.delivery_tag, requeue=True
                )
            else:
                yield channel.basic_ack(delivery_tag=delivery_frame.delivery_tag)
            raise e
        except Exception as e:
            _std_log.exception(
                "Received unexpected exception from consumer %r", consumer
            )
            yield channel.basic_nack(delivery_tag=0, multiple=True, requeue=True)
            raise e
        else:
            _std_log.info(
                "Successfully consumed message from topic %s (message id %s)",
                message.topic,
                properties.message_id,
            )
            yield channel.basic_ack(delivery_tag=delivery_frame.delivery_tag)
python
def _read(self, queue_object, consumer):
    """
    The loop that reads from the message queue and calls the consumer callback
    wrapper.

    Serialized Processing
    ---------------------
    This loop processes messages serially. This is because a second
    ``queue_object.get()`` operation can only occur after the Deferred from
    ``self._on_message`` completes. Thus, we can be sure that callbacks
    never run concurrently in two different threads.

    This is done rather than saturating the Twisted thread pool as the
    documentation for callbacks (in fedmsg and here) has never indicated
    that they are not thread-safe. In the future we can add a flag for
    users who are confident in their ability to write thread-safe code.

    Gracefully Halting
    ------------------
    This is a loop that only exits when the consumer._running variable is
    set to False. The call to cancel will set this to false, as will the
    call to :meth:`pauseProducing`. These calls will then wait for the
    Deferred from this function to call back in order to ensure the
    message finishes processing.

    The Deferred object only completes when this method returns, so we
    need to periodically check the status of consumer._running. That's
    why there's a short timeout on the call to ``queue_object.get``.

    Args:
        queue_object (pika.adapters.twisted_connection.ClosableDeferredQueue):
            The AMQP queue the consumer is bound to.
        consumer (dict): A dictionary describing the consumer for the given
            queue_object.
    """
    while consumer._running:
        try:
            deferred_get = queue_object.get()
            _add_timeout(deferred_get, 1)
            channel, delivery_frame, properties, body = yield deferred_get
        except (defer.TimeoutError, defer.CancelledError):
            continue

        _std_log.debug(
            "Message arrived with delivery tag %s for %r",
            delivery_frame.delivery_tag,
            consumer._tag,
        )
        try:
            message = get_message(delivery_frame.routing_key, properties, body)
            message.queue = consumer.queue
        except ValidationError:
            _std_log.warning(
                "Message id %s did not pass validation; ignoring message",
                properties.message_id,
            )
            yield channel.basic_nack(
                delivery_tag=delivery_frame.delivery_tag, requeue=False
            )
            continue

        try:
            _std_log.info(
                "Consuming message from topic %s (message id %s)",
                message.topic,
                properties.message_id,
            )
            yield threads.deferToThread(consumer.callback, message)
        except Nack:
            _std_log.warning(
                "Returning message id %s to the queue", properties.message_id
            )
            yield channel.basic_nack(
                delivery_tag=delivery_frame.delivery_tag, requeue=True
            )
        except Drop:
            _std_log.warning(
                "Consumer requested message id %s be dropped", properties.message_id
            )
            yield channel.basic_nack(
                delivery_tag=delivery_frame.delivery_tag, requeue=False
            )
        except HaltConsumer as e:
            _std_log.info(
                "Consumer indicated it wishes consumption to halt, shutting down"
            )
            if e.requeue:
                yield channel.basic_nack(
                    delivery_tag=delivery_frame.delivery_tag, requeue=True
                )
            else:
                yield channel.basic_ack(delivery_tag=delivery_frame.delivery_tag)
            raise e
        except Exception as e:
            _std_log.exception(
                "Received unexpected exception from consumer %r", consumer
            )
            yield channel.basic_nack(delivery_tag=0, multiple=True, requeue=True)
            raise e
        else:
            _std_log.info(
                "Successfully consumed message from topic %s (message id %s)",
                message.topic,
                properties.message_id,
            )
            yield channel.basic_ack(delivery_tag=delivery_frame.delivery_tag)
The loop that reads from the message queue and calls the consumer callback
wrapper.

Serialized Processing
---------------------
This loop processes messages serially. This is because a second
``queue_object.get()`` operation can only occur after the Deferred from
``self._on_message`` completes. Thus, we can be sure that callbacks never
run concurrently in two different threads.

This is done rather than saturating the Twisted thread pool as the
documentation for callbacks (in fedmsg and here) has never indicated that
they are not thread-safe. In the future we can add a flag for users who
are confident in their ability to write thread-safe code.

Gracefully Halting
------------------
This is a loop that only exits when the consumer._running variable is set
to False. The call to cancel will set this to false, as will the call to
:meth:`pauseProducing`. These calls will then wait for the Deferred from
this function to call back in order to ensure the message finishes
processing.

The Deferred object only completes when this method returns, so we need
to periodically check the status of consumer._running. That's why there's
a short timeout on the call to ``queue_object.get``.

Args:
    queue_object (pika.adapters.twisted_connection.ClosableDeferredQueue):
        The AMQP queue the consumer is bound to.
    consumer (dict): A dictionary describing the consumer for the given
        queue_object.
https://github.com/fedora-infra/fedora-messaging/blob/be3e88534e2b15d579bcd24f9c4b7e795cb7e0b7/fedora_messaging/twisted/protocol.py#L172-L277
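The docstring's "short timeout so the running flag gets re-checked" pattern can be shown without AMQP. A minimal stand-alone sketch with a DeferredQueue, assuming a Twisted version that has Deferred.addTimeout:

from twisted.internet import defer, reactor

@defer.inlineCallbacks
def read_loop(queue, state):
    while state['running']:
        d = queue.get()
        d.addTimeout(1, reactor)  # short timeout, so the flag is re-checked
        try:
            item = yield d
        except (defer.TimeoutError, defer.CancelledError):
            continue              # nothing arrived; loop and test the flag
        print('processed', item)

queue = defer.DeferredQueue()
state = {'running': True}
queue.put('hello')
reactor.callLater(3, state.update, {'running': False})  # request a halt
read_loop(queue, state).addCallback(lambda _: reactor.stop())
reactor.run()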
fedora-infra/fedora-messaging
fedora_messaging/twisted/protocol.py
FedoraMessagingProtocolV2.publish
def publish(self, message, exchange):
    """
    Publish a :class:`fedora_messaging.message.Message` to an `exchange`_ on
    the message broker.

    Args:
        message (message.Message): The message to publish.
        exchange (str): The name of the AMQP exchange to publish to.

    Raises:
        NoFreeChannels: If there are no available channels on this connection.
            If this occurs, you can either reduce the number of consumers on
            this connection or create an additional connection.
        PublishReturned: If the broker rejected the message. This can happen
            if there are resource limits that have been reached (full disk,
            for example) or if the message will be routed to 0 queues and the
            exchange is set to reject such messages.

    .. _exchange: https://www.rabbitmq.com/tutorials/amqp-concepts.html#exchanges
    """
    message.validate()
    try:
        yield self._channel.basic_publish(
            exchange=exchange,
            routing_key=message._encoded_routing_key,
            body=message._encoded_body,
            properties=message._properties,
        )
    except (pika.exceptions.NackError, pika.exceptions.UnroutableError) as e:
        _std_log.error("Message was rejected by the broker (%s)", str(e))
        raise PublishReturned(reason=e)
    except pika.exceptions.ChannelClosed:
        self._channel = yield self._allocate_channel()
        yield self.publish(message, exchange)
    except pika.exceptions.ConnectionClosed as e:
        raise ConnectionException(reason=e)
python
def publish(self, message, exchange):
    """
    Publish a :class:`fedora_messaging.message.Message` to an `exchange`_ on
    the message broker.

    Args:
        message (message.Message): The message to publish.
        exchange (str): The name of the AMQP exchange to publish to.

    Raises:
        NoFreeChannels: If there are no available channels on this connection.
            If this occurs, you can either reduce the number of consumers on
            this connection or create an additional connection.
        PublishReturned: If the broker rejected the message. This can happen
            if there are resource limits that have been reached (full disk,
            for example) or if the message will be routed to 0 queues and the
            exchange is set to reject such messages.

    .. _exchange: https://www.rabbitmq.com/tutorials/amqp-concepts.html#exchanges
    """
    message.validate()
    try:
        yield self._channel.basic_publish(
            exchange=exchange,
            routing_key=message._encoded_routing_key,
            body=message._encoded_body,
            properties=message._properties,
        )
    except (pika.exceptions.NackError, pika.exceptions.UnroutableError) as e:
        _std_log.error("Message was rejected by the broker (%s)", str(e))
        raise PublishReturned(reason=e)
    except pika.exceptions.ChannelClosed:
        self._channel = yield self._allocate_channel()
        yield self.publish(message, exchange)
    except pika.exceptions.ConnectionClosed as e:
        raise ConnectionException(reason=e)
Publish a :class:`fedora_messaging.message.Message` to an `exchange`_ on
the message broker.

Args:
    message (message.Message): The message to publish.
    exchange (str): The name of the AMQP exchange to publish to.

Raises:
    NoFreeChannels: If there are no available channels on this connection.
        If this occurs, you can either reduce the number of consumers on
        this connection or create an additional connection.
    PublishReturned: If the broker rejected the message. This can happen
        if there are resource limits that have been reached (full disk,
        for example) or if the message will be routed to 0 queues and the
        exchange is set to reject such messages.

.. _exchange: https://www.rabbitmq.com/tutorials/amqp-concepts.html#exchanges
https://github.com/fedora-infra/fedora-messaging/blob/be3e88534e2b15d579bcd24f9c4b7e795cb7e0b7/fedora_messaging/twisted/protocol.py#L280-L315
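Application code usually reaches this Twisted method through fedora-messaging's public api.publish entry point. A hedged usage sketch; the topic and body are illustrative, and a broker must be configured for the publish to actually succeed:

from fedora_messaging import api, exceptions, message

msg = message.Message(topic='example.topic', body={'hello': 'world'})

try:
    api.publish(msg)
except exceptions.PublishReturned:
    # The broker rejected the message (resource limits, or routed to zero
    # queues on a reject-on-unroutable exchange); retry or drop here.
    print('message returned by the broker')
except exceptions.ConnectionException:
    print('could not connect to the broker')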