Dataset columns (types and length ranges as reported by the dataset viewer):

repo              stringlengths 7-55
path              stringlengths 4-127
func_name         stringlengths 1-88
original_string   stringlengths 75-19.8k
language          stringclasses (1 value)
code              stringlengths 75-19.8k
code_tokens       sequence
docstring         stringlengths 3-17.3k
docstring_tokens  sequence
sha               stringlengths 40-40
url               stringlengths 87-242
partition         stringclasses (1 value)
wmayner/pyphi
pyphi/tpm.py
marginalize_out
def marginalize_out(node_indices, tpm): """Marginalize out nodes from a TPM. Args: node_indices (list[int]): The indices of nodes to be marginalized out. tpm (np.ndarray): The TPM to marginalize the node out of. Returns: np.ndarray: A TPM with the same number of dimensions, with the nodes marginalized out. """ return tpm.sum(tuple(node_indices), keepdims=True) / ( np.array(tpm.shape)[list(node_indices)].prod())
python
def marginalize_out(node_indices, tpm): """Marginalize out nodes from a TPM. Args: node_indices (list[int]): The indices of nodes to be marginalized out. tpm (np.ndarray): The TPM to marginalize the node out of. Returns: np.ndarray: A TPM with the same number of dimensions, with the nodes marginalized out. """ return tpm.sum(tuple(node_indices), keepdims=True) / ( np.array(tpm.shape)[list(node_indices)].prod())
[ "def", "marginalize_out", "(", "node_indices", ",", "tpm", ")", ":", "return", "tpm", ".", "sum", "(", "tuple", "(", "node_indices", ")", ",", "keepdims", "=", "True", ")", "/", "(", "np", ".", "array", "(", "tpm", ".", "shape", ")", "[", "list", "(", "node_indices", ")", "]", ".", "prod", "(", ")", ")" ]
Marginalize out nodes from a TPM. Args: node_indices (list[int]): The indices of nodes to be marginalized out. tpm (np.ndarray): The TPM to marginalize the node out of. Returns: np.ndarray: A TPM with the same number of dimensions, with the nodes marginalized out.
[ "Marginalize", "out", "nodes", "from", "a", "TPM", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/tpm.py#L57-L69
train
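A minimal usage sketch for the marginalize_out record above. The 2-node TPM (each node copies the other's previous state) is illustrative, and the import assumes pyphi is installed at this revision:

import numpy as np
from pyphi.tpm import marginalize_out  # as recorded above

# Made-up 2-node TPM in multidimensional state-by-node form, shape (2, 2, 2):
# the first two axes are the previous states of nodes 0 and 1, the last axis
# gives each node's probability of being ON next. Here each node copies the other.
tpm = np.array([[[0.0, 0.0],
                 [1.0, 0.0]],
                [[0.0, 1.0],
                 [1.0, 1.0]]])

# Marginalizing out node 0 averages over its two states and keeps a singleton
# axis in its place, so the result still has three dimensions.
result = marginalize_out([0], tpm)
print(result.shape)  # (1, 2, 2)
print(result[0, 0])  # [0.  0.5] -- node 1 depended only on node 0, so it becomes uniform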
wmayner/pyphi
pyphi/tpm.py
infer_edge
def infer_edge(tpm, a, b, contexts): """Infer the presence or absence of an edge from node A to node B. Let |S| be the set of all nodes in a network. Let |A' = S - {A}|. We call the state of |A'| the context |C| of |A|. There is an edge from |A| to |B| if there exists any context |C(A)| such that |Pr(B | C(A), A=0) != Pr(B | C(A), A=1)|. Args: tpm (np.ndarray): The TPM in state-by-node, multidimensional form. a (int): The index of the putative source node. b (int): The index of the putative sink node. Returns: bool: ``True`` if the edge |A -> B| exists, ``False`` otherwise. """ def a_in_context(context): """Given a context C(A), return the states of the full system with A OFF and ON, respectively. """ a_off = context[:a] + OFF + context[a:] a_on = context[:a] + ON + context[a:] return (a_off, a_on) def a_affects_b_in_context(context): """Return ``True`` if A has an effect on B, given a context.""" a_off, a_on = a_in_context(context) return tpm[a_off][b] != tpm[a_on][b] return any(a_affects_b_in_context(context) for context in contexts)
python
def infer_edge(tpm, a, b, contexts): """Infer the presence or absence of an edge from node A to node B. Let |S| be the set of all nodes in a network. Let |A' = S - {A}|. We call the state of |A'| the context |C| of |A|. There is an edge from |A| to |B| if there exists any context |C(A)| such that |Pr(B | C(A), A=0) != Pr(B | C(A), A=1)|. Args: tpm (np.ndarray): The TPM in state-by-node, multidimensional form. a (int): The index of the putative source node. b (int): The index of the putative sink node. Returns: bool: ``True`` if the edge |A -> B| exists, ``False`` otherwise. """ def a_in_context(context): """Given a context C(A), return the states of the full system with A OFF and ON, respectively. """ a_off = context[:a] + OFF + context[a:] a_on = context[:a] + ON + context[a:] return (a_off, a_on) def a_affects_b_in_context(context): """Return ``True`` if A has an effect on B, given a context.""" a_off, a_on = a_in_context(context) return tpm[a_off][b] != tpm[a_on][b] return any(a_affects_b_in_context(context) for context in contexts)
[ "def", "infer_edge", "(", "tpm", ",", "a", ",", "b", ",", "contexts", ")", ":", "def", "a_in_context", "(", "context", ")", ":", "\"\"\"Given a context C(A), return the states of the full system with A\n OFF and ON, respectively.\n \"\"\"", "a_off", "=", "context", "[", ":", "a", "]", "+", "OFF", "+", "context", "[", "a", ":", "]", "a_on", "=", "context", "[", ":", "a", "]", "+", "ON", "+", "context", "[", "a", ":", "]", "return", "(", "a_off", ",", "a_on", ")", "def", "a_affects_b_in_context", "(", "context", ")", ":", "\"\"\"Return ``True`` if A has an effect on B, given a context.\"\"\"", "a_off", ",", "a_on", "=", "a_in_context", "(", "context", ")", "return", "tpm", "[", "a_off", "]", "[", "b", "]", "!=", "tpm", "[", "a_on", "]", "[", "b", "]", "return", "any", "(", "a_affects_b_in_context", "(", "context", ")", "for", "context", "in", "contexts", ")" ]
Infer the presence or absence of an edge from node A to node B. Let |S| be the set of all nodes in a network. Let |A' = S - {A}|. We call the state of |A'| the context |C| of |A|. There is an edge from |A| to |B| if there exists any context |C(A)| such that |Pr(B | C(A), A=0) != Pr(B | C(A), A=1)|. Args: tpm (np.ndarray): The TPM in state-by-node, multidimensional form. a (int): The index of the putative source node. b (int): The index of the putative sink node. Returns: bool: ``True`` if the edge |A -> B| exists, ``False`` otherwise.
[ "Infer", "the", "presence", "or", "absence", "of", "an", "edge", "from", "node", "A", "to", "node", "B", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/tpm.py#L72-L101
train
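A sketch of how infer_edge might be driven for a tiny network. The TPM here is illustrative (node 1 copies node 0, node 0 always turns OFF), and it assumes the module-level OFF and ON constants are the singleton tuples (0,) and (1,), which is how the slicing in a_in_context reads:

import numpy as np
from pyphi.tpm import infer_edge  # as recorded above

# Illustrative 2-node TPM in multidimensional state-by-node form.
tpm = np.array([[[0.0, 0.0],    # previous state (0, 0)
                 [0.0, 0.0]],   # previous state (0, 1)
                [[0.0, 1.0],    # previous state (1, 0)
                 [0.0, 1.0]]])  # previous state (1, 1)

# Contexts for a 2-node system are all states of the one remaining node.
contexts = ((0,), (1,))

print(infer_edge(tpm, 0, 1, contexts))  # True  -- node 0 drives node 1
print(infer_edge(tpm, 1, 0, contexts))  # False -- node 1 affects nothing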
wmayner/pyphi
pyphi/tpm.py
infer_cm
def infer_cm(tpm): """Infer the connectivity matrix associated with a state-by-node TPM in multidimensional form. """ network_size = tpm.shape[-1] all_contexts = tuple(all_states(network_size - 1)) cm = np.empty((network_size, network_size), dtype=int) for a, b in np.ndindex(cm.shape): cm[a][b] = infer_edge(tpm, a, b, all_contexts) return cm
python
def infer_cm(tpm): """Infer the connectivity matrix associated with a state-by-node TPM in multidimensional form. """ network_size = tpm.shape[-1] all_contexts = tuple(all_states(network_size - 1)) cm = np.empty((network_size, network_size), dtype=int) for a, b in np.ndindex(cm.shape): cm[a][b] = infer_edge(tpm, a, b, all_contexts) return cm
[ "def", "infer_cm", "(", "tpm", ")", ":", "network_size", "=", "tpm", ".", "shape", "[", "-", "1", "]", "all_contexts", "=", "tuple", "(", "all_states", "(", "network_size", "-", "1", ")", ")", "cm", "=", "np", ".", "empty", "(", "(", "network_size", ",", "network_size", ")", ",", "dtype", "=", "int", ")", "for", "a", ",", "b", "in", "np", ".", "ndindex", "(", "cm", ".", "shape", ")", ":", "cm", "[", "a", "]", "[", "b", "]", "=", "infer_edge", "(", "tpm", ",", "a", ",", "b", ",", "all_contexts", ")", "return", "cm" ]
Infer the connectivity matrix associated with a state-by-node TPM in multidimensional form.
[ "Infer", "the", "connectivity", "matrix", "associated", "with", "a", "state", "-", "by", "-", "node", "TPM", "in", "multidimensional", "form", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/tpm.py#L104-L113
train
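The same illustrative copy network from the infer_edge sketch above, run through infer_cm to recover the full connectivity matrix (row = source node, column = sink node):

import numpy as np
from pyphi.tpm import infer_cm  # as recorded above

# Node 1 copies node 0; node 0 always goes OFF.
tpm = np.array([[[0.0, 0.0],
                 [0.0, 0.0]],
                [[0.0, 1.0],
                 [0.0, 1.0]]])

print(infer_cm(tpm))
# [[0 1]
#  [0 0]]   -- the only edge is node 0 -> node 1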
wmayner/pyphi
pyphi/compute/parallel.py
get_num_processes
def get_num_processes(): """Return the number of processes to use in parallel.""" cpu_count = multiprocessing.cpu_count() if config.NUMBER_OF_CORES == 0: raise ValueError( 'Invalid NUMBER_OF_CORES; value may not be 0.') if config.NUMBER_OF_CORES > cpu_count: log.info('Requesting %s cores; only %s available', config.NUMBER_OF_CORES, cpu_count) return cpu_count if config.NUMBER_OF_CORES < 0: num = cpu_count + config.NUMBER_OF_CORES + 1 if num <= 0: raise ValueError( 'Invalid NUMBER_OF_CORES; negative value is too negative: ' 'requesting {} cores, {} available.'.format(num, cpu_count)) return num return config.NUMBER_OF_CORES
python
def get_num_processes(): """Return the number of processes to use in parallel.""" cpu_count = multiprocessing.cpu_count() if config.NUMBER_OF_CORES == 0: raise ValueError( 'Invalid NUMBER_OF_CORES; value may not be 0.') if config.NUMBER_OF_CORES > cpu_count: log.info('Requesting %s cores; only %s available', config.NUMBER_OF_CORES, cpu_count) return cpu_count if config.NUMBER_OF_CORES < 0: num = cpu_count + config.NUMBER_OF_CORES + 1 if num <= 0: raise ValueError( 'Invalid NUMBER_OF_CORES; negative value is too negative: ' 'requesting {} cores, {} available.'.format(num, cpu_count)) return num return config.NUMBER_OF_CORES
[ "def", "get_num_processes", "(", ")", ":", "cpu_count", "=", "multiprocessing", ".", "cpu_count", "(", ")", "if", "config", ".", "NUMBER_OF_CORES", "==", "0", ":", "raise", "ValueError", "(", "'Invalid NUMBER_OF_CORES; value may not be 0.'", ")", "if", "config", ".", "NUMBER_OF_CORES", ">", "cpu_count", ":", "log", ".", "info", "(", "'Requesting %s cores; only %s available'", ",", "config", ".", "NUMBER_OF_CORES", ",", "cpu_count", ")", "return", "cpu_count", "if", "config", ".", "NUMBER_OF_CORES", "<", "0", ":", "num", "=", "cpu_count", "+", "config", ".", "NUMBER_OF_CORES", "+", "1", "if", "num", "<=", "0", ":", "raise", "ValueError", "(", "'Invalid NUMBER_OF_CORES; negative value is too negative: '", "'requesting {} cores, {} available.'", ".", "format", "(", "num", ",", "cpu_count", ")", ")", "return", "num", "return", "config", ".", "NUMBER_OF_CORES" ]
Return the number of processes to use in parallel.
[ "Return", "the", "number", "of", "processes", "to", "use", "in", "parallel", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/compute/parallel.py#L24-L46
train
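The negative-value convention in get_num_processes mirrors Python's negative indexing; a quick worked illustration of the arithmetic in the function (the core count here is made up, at runtime it comes from multiprocessing.cpu_count()):

cpu_count = 8  # illustrative
for requested in (-1, -2, -4):
    num = cpu_count + requested + 1
    print(requested, '->', num)  # -1 -> 8 (all cores), -2 -> 7, -4 -> 5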
wmayner/pyphi
pyphi/compute/parallel.py
MapReduce.init_progress_bar
def init_progress_bar(self): """Initialize and return a progress bar.""" # Forked worker processes can't show progress bars. disable = MapReduce._forked or not config.PROGRESS_BARS # Don't materialize iterable unless we have to: huge iterables # (e.g. of `KCuts`) eat memory. if disable: total = None else: self.iterable = list(self.iterable) total = len(self.iterable) return tqdm(total=total, disable=disable, leave=False, desc=self.description)
python
def init_progress_bar(self): """Initialize and return a progress bar.""" # Forked worker processes can't show progress bars. disable = MapReduce._forked or not config.PROGRESS_BARS # Don't materialize iterable unless we have to: huge iterables # (e.g. of `KCuts`) eat memory. if disable: total = None else: self.iterable = list(self.iterable) total = len(self.iterable) return tqdm(total=total, disable=disable, leave=False, desc=self.description)
[ "def", "init_progress_bar", "(", "self", ")", ":", "# Forked worker processes can't show progress bars.", "disable", "=", "MapReduce", ".", "_forked", "or", "not", "config", ".", "PROGRESS_BARS", "# Don't materialize iterable unless we have to: huge iterables", "# (e.g. of `KCuts`) eat memory.", "if", "disable", ":", "total", "=", "None", "else", ":", "self", ".", "iterable", "=", "list", "(", "self", ".", "iterable", ")", "total", "=", "len", "(", "self", ".", "iterable", ")", "return", "tqdm", "(", "total", "=", "total", ",", "disable", "=", "disable", ",", "leave", "=", "False", ",", "desc", "=", "self", ".", "description", ")" ]
Initialize and return a progress bar.
[ "Initialize", "and", "return", "a", "progress", "bar", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/compute/parallel.py#L144-L158
train
wmayner/pyphi
pyphi/compute/parallel.py
MapReduce.worker
def worker(compute, task_queue, result_queue, log_queue, complete, *context): """A worker process, run by ``multiprocessing.Process``.""" try: MapReduce._forked = True log.debug('Worker process starting...') configure_worker_logging(log_queue) for obj in iter(task_queue.get, POISON_PILL): if complete.is_set(): log.debug('Worker received signal - exiting early') break log.debug('Worker got %s', obj) result_queue.put(compute(obj, *context)) log.debug('Worker finished %s', obj) result_queue.put(POISON_PILL) log.debug('Worker process exiting') except Exception as e: # pylint: disable=broad-except result_queue.put(ExceptionWrapper(e))
python
def worker(compute, task_queue, result_queue, log_queue, complete, *context): """A worker process, run by ``multiprocessing.Process``.""" try: MapReduce._forked = True log.debug('Worker process starting...') configure_worker_logging(log_queue) for obj in iter(task_queue.get, POISON_PILL): if complete.is_set(): log.debug('Worker received signal - exiting early') break log.debug('Worker got %s', obj) result_queue.put(compute(obj, *context)) log.debug('Worker finished %s', obj) result_queue.put(POISON_PILL) log.debug('Worker process exiting') except Exception as e: # pylint: disable=broad-except result_queue.put(ExceptionWrapper(e))
[ "def", "worker", "(", "compute", ",", "task_queue", ",", "result_queue", ",", "log_queue", ",", "complete", ",", "*", "context", ")", ":", "try", ":", "MapReduce", ".", "_forked", "=", "True", "log", ".", "debug", "(", "'Worker process starting...'", ")", "configure_worker_logging", "(", "log_queue", ")", "for", "obj", "in", "iter", "(", "task_queue", ".", "get", ",", "POISON_PILL", ")", ":", "if", "complete", ".", "is_set", "(", ")", ":", "log", ".", "debug", "(", "'Worker received signal - exiting early'", ")", "break", "log", ".", "debug", "(", "'Worker got %s'", ",", "obj", ")", "result_queue", ".", "put", "(", "compute", "(", "obj", ",", "*", "context", ")", ")", "log", ".", "debug", "(", "'Worker finished %s'", ",", "obj", ")", "result_queue", ".", "put", "(", "POISON_PILL", ")", "log", ".", "debug", "(", "'Worker process exiting'", ")", "except", "Exception", "as", "e", ":", "# pylint: disable=broad-except", "result_queue", ".", "put", "(", "ExceptionWrapper", "(", "e", ")", ")" ]
A worker process, run by ``multiprocessing.Process``.
[ "A", "worker", "process", "run", "by", "multiprocessing", ".", "Process", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/compute/parallel.py#L161-L183
train
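The worker loop is built on the two-argument iter(callable, sentinel) idiom: keep pulling tasks until the poison pill arrives, then echo the pill so the parent can count worker exits. A self-contained toy version of the same pattern; the names and the plain None sentinel are illustrative, not pyphi's own:

import multiprocessing as mp

POISON_PILL = None  # simplified sentinel standing in for pyphi's module-level constant

def square_worker(task_queue, result_queue):
    # iter(get, POISON_PILL) keeps yielding tasks until the sentinel is received.
    for item in iter(task_queue.get, POISON_PILL):
        result_queue.put(item * item)
    result_queue.put(POISON_PILL)  # echo the sentinel so the consumer knows we exited

if __name__ == '__main__':
    tasks, results = mp.Queue(), mp.Queue()
    proc = mp.Process(target=square_worker, args=(tasks, results), daemon=True)
    proc.start()
    for n in (1, 2, 3):
        tasks.put(n)
    tasks.put(POISON_PILL)
    out = []
    while True:
        r = results.get()
        if r is POISON_PILL:
            break
        out.append(r)
    proc.join()
    print(out)  # [1, 4, 9]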
wmayner/pyphi
pyphi/compute/parallel.py
MapReduce.start_parallel
def start_parallel(self): """Initialize all queues and start the worker processes and the log thread. """ self.num_processes = get_num_processes() self.task_queue = multiprocessing.Queue(maxsize=Q_MAX_SIZE) self.result_queue = multiprocessing.Queue() self.log_queue = multiprocessing.Queue() # Used to signal worker processes when a result is found that allows # the computation to terminate early. self.complete = multiprocessing.Event() args = (self.compute, self.task_queue, self.result_queue, self.log_queue, self.complete) + self.context self.processes = [ multiprocessing.Process(target=self.worker, args=args, daemon=True) for i in range(self.num_processes)] for process in self.processes: process.start() self.log_thread = LogThread(self.log_queue) self.log_thread.start() self.initialize_tasks()
python
def start_parallel(self): """Initialize all queues and start the worker processes and the log thread. """ self.num_processes = get_num_processes() self.task_queue = multiprocessing.Queue(maxsize=Q_MAX_SIZE) self.result_queue = multiprocessing.Queue() self.log_queue = multiprocessing.Queue() # Used to signal worker processes when a result is found that allows # the computation to terminate early. self.complete = multiprocessing.Event() args = (self.compute, self.task_queue, self.result_queue, self.log_queue, self.complete) + self.context self.processes = [ multiprocessing.Process(target=self.worker, args=args, daemon=True) for i in range(self.num_processes)] for process in self.processes: process.start() self.log_thread = LogThread(self.log_queue) self.log_thread.start() self.initialize_tasks()
[ "def", "start_parallel", "(", "self", ")", ":", "self", ".", "num_processes", "=", "get_num_processes", "(", ")", "self", ".", "task_queue", "=", "multiprocessing", ".", "Queue", "(", "maxsize", "=", "Q_MAX_SIZE", ")", "self", ".", "result_queue", "=", "multiprocessing", ".", "Queue", "(", ")", "self", ".", "log_queue", "=", "multiprocessing", ".", "Queue", "(", ")", "# Used to signal worker processes when a result is found that allows", "# the computation to terminate early.", "self", ".", "complete", "=", "multiprocessing", ".", "Event", "(", ")", "args", "=", "(", "self", ".", "compute", ",", "self", ".", "task_queue", ",", "self", ".", "result_queue", ",", "self", ".", "log_queue", ",", "self", ".", "complete", ")", "+", "self", ".", "context", "self", ".", "processes", "=", "[", "multiprocessing", ".", "Process", "(", "target", "=", "self", ".", "worker", ",", "args", "=", "args", ",", "daemon", "=", "True", ")", "for", "i", "in", "range", "(", "self", ".", "num_processes", ")", "]", "for", "process", "in", "self", ".", "processes", ":", "process", ".", "start", "(", ")", "self", ".", "log_thread", "=", "LogThread", "(", "self", ".", "log_queue", ")", "self", ".", "log_thread", ".", "start", "(", ")", "self", ".", "initialize_tasks", "(", ")" ]
Initialize all queues and start the worker processes and the log thread.
[ "Initialize", "all", "queues", "and", "start", "the", "worker", "processes", "and", "the", "log", "thread", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/compute/parallel.py#L185-L211
train
wmayner/pyphi
pyphi/compute/parallel.py
MapReduce.initialize_tasks
def initialize_tasks(self): """Load the input queue to capacity. Overfilling causes a deadlock when `queue.put` blocks when full, so further tasks are enqueued as results are returned. """ # Add a poison pill to shutdown each process. self.tasks = chain(self.iterable, [POISON_PILL] * self.num_processes) for task in islice(self.tasks, Q_MAX_SIZE): log.debug('Putting %s on queue', task) self.task_queue.put(task)
python
def initialize_tasks(self): """Load the input queue to capacity. Overfilling causes a deadlock when `queue.put` blocks when full, so further tasks are enqueued as results are returned. """ # Add a poison pill to shutdown each process. self.tasks = chain(self.iterable, [POISON_PILL] * self.num_processes) for task in islice(self.tasks, Q_MAX_SIZE): log.debug('Putting %s on queue', task) self.task_queue.put(task)
[ "def", "initialize_tasks", "(", "self", ")", ":", "# Add a poison pill to shutdown each process.", "self", ".", "tasks", "=", "chain", "(", "self", ".", "iterable", ",", "[", "POISON_PILL", "]", "*", "self", ".", "num_processes", ")", "for", "task", "in", "islice", "(", "self", ".", "tasks", ",", "Q_MAX_SIZE", ")", ":", "log", ".", "debug", "(", "'Putting %s on queue'", ",", "task", ")", "self", ".", "task_queue", ".", "put", "(", "task", ")" ]
Load the input queue to capacity. Overfilling causes a deadlock when `queue.put` blocks when full, so further tasks are enqueued as results are returned.
[ "Load", "the", "input", "queue", "to", "capacity", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/compute/parallel.py#L213-L223
train
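initialize_tasks primes a bounded queue with itertools.chain (to append one poison pill per worker) and itertools.islice (to stop at capacity). A standalone illustration of that priming step, using a plain queue.Queue and made-up constants in place of pyphi's:

from itertools import chain, islice
from queue import Queue

Q_MAX_SIZE = 4      # illustrative capacity
num_processes = 2
POISON_PILL = None

iterable = iter(range(10))
# Append one sentinel per worker, then enqueue only up to capacity; the rest
# of the tasks are fed in later, one per result received (see maybe_put_task).
tasks = chain(iterable, [POISON_PILL] * num_processes)
task_queue = Queue(maxsize=Q_MAX_SIZE)
for task in islice(tasks, Q_MAX_SIZE):
    task_queue.put(task)

print(task_queue.qsize())  # 4
print(next(tasks))         # 4 -- the next task that would be enqueued later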
wmayner/pyphi
pyphi/compute/parallel.py
MapReduce.maybe_put_task
def maybe_put_task(self): """Enqueue the next task, if there are any waiting.""" try: task = next(self.tasks) except StopIteration: pass else: log.debug('Putting %s on queue', task) self.task_queue.put(task)
python
def maybe_put_task(self): """Enqueue the next task, if there are any waiting.""" try: task = next(self.tasks) except StopIteration: pass else: log.debug('Putting %s on queue', task) self.task_queue.put(task)
[ "def", "maybe_put_task", "(", "self", ")", ":", "try", ":", "task", "=", "next", "(", "self", ".", "tasks", ")", "except", "StopIteration", ":", "pass", "else", ":", "log", ".", "debug", "(", "'Putting %s on queue'", ",", "task", ")", "self", ".", "task_queue", ".", "put", "(", "task", ")" ]
Enqueue the next task, if there are any waiting.
[ "Enqueue", "the", "next", "task", "if", "there", "are", "any", "waiting", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/compute/parallel.py#L225-L233
train
wmayner/pyphi
pyphi/compute/parallel.py
MapReduce.run_parallel
def run_parallel(self): """Perform the computation in parallel, reading results from the output queue and passing them to ``process_result``. """ try: self.start_parallel() result = self.empty_result(*self.context) while self.num_processes > 0: r = self.result_queue.get() self.maybe_put_task() if r is POISON_PILL: self.num_processes -= 1 elif isinstance(r, ExceptionWrapper): r.reraise() else: result = self.process_result(r, result) self.progress.update(1) # Did `process_result` decide to terminate early? if self.done: self.complete.set() self.finish_parallel() except Exception: raise finally: log.debug('Removing progress bar') self.progress.close() return result
python
def run_parallel(self): """Perform the computation in parallel, reading results from the output queue and passing them to ``process_result``. """ try: self.start_parallel() result = self.empty_result(*self.context) while self.num_processes > 0: r = self.result_queue.get() self.maybe_put_task() if r is POISON_PILL: self.num_processes -= 1 elif isinstance(r, ExceptionWrapper): r.reraise() else: result = self.process_result(r, result) self.progress.update(1) # Did `process_result` decide to terminate early? if self.done: self.complete.set() self.finish_parallel() except Exception: raise finally: log.debug('Removing progress bar') self.progress.close() return result
[ "def", "run_parallel", "(", "self", ")", ":", "try", ":", "self", ".", "start_parallel", "(", ")", "result", "=", "self", ".", "empty_result", "(", "*", "self", ".", "context", ")", "while", "self", ".", "num_processes", ">", "0", ":", "r", "=", "self", ".", "result_queue", ".", "get", "(", ")", "self", ".", "maybe_put_task", "(", ")", "if", "r", "is", "POISON_PILL", ":", "self", ".", "num_processes", "-=", "1", "elif", "isinstance", "(", "r", ",", "ExceptionWrapper", ")", ":", "r", ".", "reraise", "(", ")", "else", ":", "result", "=", "self", ".", "process_result", "(", "r", ",", "result", ")", "self", ".", "progress", ".", "update", "(", "1", ")", "# Did `process_result` decide to terminate early?", "if", "self", ".", "done", ":", "self", ".", "complete", ".", "set", "(", ")", "self", ".", "finish_parallel", "(", ")", "except", "Exception", ":", "raise", "finally", ":", "log", ".", "debug", "(", "'Removing progress bar'", ")", "self", ".", "progress", ".", "close", "(", ")", "return", "result" ]
Perform the computation in parallel, reading results from the output queue and passing them to ``process_result``.
[ "Perform", "the", "computation", "in", "parallel", "reading", "results", "from", "the", "output", "queue", "and", "passing", "them", "to", "process_result", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/compute/parallel.py#L235-L269
train
wmayner/pyphi
pyphi/compute/parallel.py
MapReduce.finish_parallel
def finish_parallel(self): """Orderly shutdown of workers.""" for process in self.processes: process.join() # Shutdown the log thread log.debug('Joining log thread') self.log_queue.put(POISON_PILL) self.log_thread.join() self.log_queue.close() # Close all queues log.debug('Closing queues') self.task_queue.close() self.result_queue.close()
python
def finish_parallel(self): """Orderly shutdown of workers.""" for process in self.processes: process.join() # Shutdown the log thread log.debug('Joining log thread') self.log_queue.put(POISON_PILL) self.log_thread.join() self.log_queue.close() # Close all queues log.debug('Closing queues') self.task_queue.close() self.result_queue.close()
[ "def", "finish_parallel", "(", "self", ")", ":", "for", "process", "in", "self", ".", "processes", ":", "process", ".", "join", "(", ")", "# Shutdown the log thread", "log", ".", "debug", "(", "'Joining log thread'", ")", "self", ".", "log_queue", ".", "put", "(", "POISON_PILL", ")", "self", ".", "log_thread", ".", "join", "(", ")", "self", ".", "log_queue", ".", "close", "(", ")", "# Close all queues", "log", ".", "debug", "(", "'Closing queues'", ")", "self", ".", "task_queue", ".", "close", "(", ")", "self", ".", "result_queue", ".", "close", "(", ")" ]
Orderly shutdown of workers.
[ "Orderly", "shutdown", "of", "workers", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/compute/parallel.py#L271-L285
train
wmayner/pyphi
pyphi/compute/parallel.py
MapReduce.run_sequential
def run_sequential(self): """Perform the computation sequentially, only holding two computed objects in memory at a time. """ try: result = self.empty_result(*self.context) for obj in self.iterable: r = self.compute(obj, *self.context) result = self.process_result(r, result) self.progress.update(1) # Short-circuited? if self.done: break except Exception as e: raise e finally: self.progress.close() return result
python
def run_sequential(self): """Perform the computation sequentially, only holding two computed objects in memory at a time. """ try: result = self.empty_result(*self.context) for obj in self.iterable: r = self.compute(obj, *self.context) result = self.process_result(r, result) self.progress.update(1) # Short-circuited? if self.done: break except Exception as e: raise e finally: self.progress.close() return result
[ "def", "run_sequential", "(", "self", ")", ":", "try", ":", "result", "=", "self", ".", "empty_result", "(", "*", "self", ".", "context", ")", "for", "obj", "in", "self", ".", "iterable", ":", "r", "=", "self", ".", "compute", "(", "obj", ",", "*", "self", ".", "context", ")", "result", "=", "self", ".", "process_result", "(", "r", ",", "result", ")", "self", ".", "progress", ".", "update", "(", "1", ")", "# Short-circuited?", "if", "self", ".", "done", ":", "break", "except", "Exception", "as", "e", ":", "raise", "e", "finally", ":", "self", ".", "progress", ".", "close", "(", ")", "return", "result" ]
Perform the computation sequentially, only holding two computed objects in memory at a time.
[ "Perform", "the", "computation", "sequentially", "only", "holding", "two", "computed", "objects", "in", "memory", "at", "a", "time", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/compute/parallel.py#L287-L307
train
wmayner/pyphi
pyphi/conf.py
configure_logging
def configure_logging(conf): """Reconfigure PyPhi logging based on the current configuration.""" logging.config.dictConfig({ 'version': 1, 'disable_existing_loggers': False, 'formatters': { 'standard': { 'format': '%(asctime)s [%(name)s] %(levelname)s ' '%(processName)s: %(message)s' } }, 'handlers': { 'file': { 'level': conf.LOG_FILE_LEVEL, 'filename': conf.LOG_FILE, 'class': 'logging.FileHandler', 'formatter': 'standard', }, 'stdout': { 'level': conf.LOG_STDOUT_LEVEL, 'class': 'pyphi.log.TqdmHandler', 'formatter': 'standard', } }, 'root': { 'level': 'DEBUG', 'handlers': (['file'] if conf.LOG_FILE_LEVEL else []) + (['stdout'] if conf.LOG_STDOUT_LEVEL else []) } })
python
def configure_logging(conf): """Reconfigure PyPhi logging based on the current configuration.""" logging.config.dictConfig({ 'version': 1, 'disable_existing_loggers': False, 'formatters': { 'standard': { 'format': '%(asctime)s [%(name)s] %(levelname)s ' '%(processName)s: %(message)s' } }, 'handlers': { 'file': { 'level': conf.LOG_FILE_LEVEL, 'filename': conf.LOG_FILE, 'class': 'logging.FileHandler', 'formatter': 'standard', }, 'stdout': { 'level': conf.LOG_STDOUT_LEVEL, 'class': 'pyphi.log.TqdmHandler', 'formatter': 'standard', } }, 'root': { 'level': 'DEBUG', 'handlers': (['file'] if conf.LOG_FILE_LEVEL else []) + (['stdout'] if conf.LOG_STDOUT_LEVEL else []) } })
[ "def", "configure_logging", "(", "conf", ")", ":", "logging", ".", "config", ".", "dictConfig", "(", "{", "'version'", ":", "1", ",", "'disable_existing_loggers'", ":", "False", ",", "'formatters'", ":", "{", "'standard'", ":", "{", "'format'", ":", "'%(asctime)s [%(name)s] %(levelname)s '", "'%(processName)s: %(message)s'", "}", "}", ",", "'handlers'", ":", "{", "'file'", ":", "{", "'level'", ":", "conf", ".", "LOG_FILE_LEVEL", ",", "'filename'", ":", "conf", ".", "LOG_FILE", ",", "'class'", ":", "'logging.FileHandler'", ",", "'formatter'", ":", "'standard'", ",", "}", ",", "'stdout'", ":", "{", "'level'", ":", "conf", ".", "LOG_STDOUT_LEVEL", ",", "'class'", ":", "'pyphi.log.TqdmHandler'", ",", "'formatter'", ":", "'standard'", ",", "}", "}", ",", "'root'", ":", "{", "'level'", ":", "'DEBUG'", ",", "'handlers'", ":", "(", "[", "'file'", "]", "if", "conf", ".", "LOG_FILE_LEVEL", "else", "[", "]", ")", "+", "(", "[", "'stdout'", "]", "if", "conf", ".", "LOG_STDOUT_LEVEL", "else", "[", "]", ")", "}", "}", ")" ]
Reconfigure PyPhi logging based on the current configuration.
[ "Reconfigure", "PyPhi", "logging", "based", "on", "the", "current", "configuration", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/conf.py#L328-L357
train
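A pared-down sketch of the same dictConfig structure using only the stdlib StreamHandler, so it runs without pyphi's TqdmHandler or config object; the logger name is illustrative:

import logging
import logging.config

logging.config.dictConfig({
    'version': 1,
    'disable_existing_loggers': False,
    'formatters': {
        'standard': {
            'format': '%(asctime)s [%(name)s] %(levelname)s '
                      '%(processName)s: %(message)s'
        }
    },
    'handlers': {
        'stdout': {
            'level': 'INFO',
            'class': 'logging.StreamHandler',
            'formatter': 'standard',
        }
    },
    'root': {'level': 'DEBUG', 'handlers': ['stdout']},
})

logging.getLogger('pyphi.demo').info('configured')  # formatted per the dict above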
wmayner/pyphi
pyphi/conf.py
Option._validate
def _validate(self, value): """Validate the new value.""" if self.values and value not in self.values: raise ValueError( '{} is not a valid value for {}'.format(value, self.name))
python
def _validate(self, value): """Validate the new value.""" if self.values and value not in self.values: raise ValueError( '{} is not a valid value for {}'.format(value, self.name))
[ "def", "_validate", "(", "self", ",", "value", ")", ":", "if", "self", ".", "values", "and", "value", "not", "in", "self", ".", "values", ":", "raise", "ValueError", "(", "'{} is not a valid value for {}'", ".", "format", "(", "value", ",", "self", ".", "name", ")", ")" ]
Validate the new value.
[ "Validate", "the", "new", "value", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/conf.py#L201-L205
train
wmayner/pyphi
pyphi/conf.py
Config.options
def options(cls): """Return a dictionary of the ``Option`` objects for this config.""" return {k: v for k, v in cls.__dict__.items() if isinstance(v, Option)}
python
def options(cls): """Return a dictionary of the ``Option`` objects for this config.""" return {k: v for k, v in cls.__dict__.items() if isinstance(v, Option)}
[ "def", "options", "(", "cls", ")", ":", "return", "{", "k", ":", "v", "for", "k", ",", "v", "in", "cls", ".", "__dict__", ".", "items", "(", ")", "if", "isinstance", "(", "v", ",", "Option", ")", "}" ]
Return a dictionary of the ``Option`` objects for this config.
[ "Return", "a", "dictionary", "of", "the", "Option", "objects", "for", "this", "config", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/conf.py#L262-L264
train
wmayner/pyphi
pyphi/conf.py
Config.defaults
def defaults(self): """Return the default values of this configuration.""" return {k: v.default for k, v in self.options().items()}
python
def defaults(self): """Return the default values of this configuration.""" return {k: v.default for k, v in self.options().items()}
[ "def", "defaults", "(", "self", ")", ":", "return", "{", "k", ":", "v", ".", "default", "for", "k", ",", "v", "in", "self", ".", "options", "(", ")", ".", "items", "(", ")", "}" ]
Return the default values of this configuration.
[ "Return", "the", "default", "values", "of", "this", "configuration", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/conf.py#L266-L268
train
wmayner/pyphi
pyphi/conf.py
Config.load_dict
def load_dict(self, dct): """Load a dictionary of configuration values.""" for k, v in dct.items(): setattr(self, k, v)
python
def load_dict(self, dct): """Load a dictionary of configuration values.""" for k, v in dct.items(): setattr(self, k, v)
[ "def", "load_dict", "(", "self", ",", "dct", ")", ":", "for", "k", ",", "v", "in", "dct", ".", "items", "(", ")", ":", "setattr", "(", "self", ",", "k", ",", "v", ")" ]
Load a dictionary of configuration values.
[ "Load", "a", "dictionary", "of", "configuration", "values", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/conf.py#L270-L273
train
wmayner/pyphi
pyphi/conf.py
Config.load_file
def load_file(self, filename): """Load config from a YAML file.""" filename = os.path.abspath(filename) with open(filename) as f: self.load_dict(yaml.load(f)) self._loaded_files.append(filename)
python
def load_file(self, filename): """Load config from a YAML file.""" filename = os.path.abspath(filename) with open(filename) as f: self.load_dict(yaml.load(f)) self._loaded_files.append(filename)
[ "def", "load_file", "(", "self", ",", "filename", ")", ":", "filename", "=", "os", ".", "path", ".", "abspath", "(", "filename", ")", "with", "open", "(", "filename", ")", "as", "f", ":", "self", ".", "load_dict", "(", "yaml", ".", "load", "(", "f", ")", ")", "self", ".", "_loaded_files", ".", "append", "(", "filename", ")" ]
Load config from a YAML file.
[ "Load", "config", "from", "a", "YAML", "file", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/conf.py#L275-L282
train
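A side note on the yaml.load(f) call in load_file: on recent PyYAML versions (5.1 and later) calling yaml.load without an explicit Loader emits a warning, and yaml.safe_load is the conventional replacement for plain config mappings. A self-contained sketch using two option names that appear elsewhere in these records:

import yaml

settings = yaml.safe_load('PROGRESS_BARS: false\nNUMBER_OF_CORES: -1\n')
print(settings)  # {'PROGRESS_BARS': False, 'NUMBER_OF_CORES': -1}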
wmayner/pyphi
pyphi/conf.py
PyphiConfig.log
def log(self): """Log current settings.""" log.info('PyPhi v%s', __about__.__version__) if self._loaded_files: log.info('Loaded configuration from %s', self._loaded_files) else: log.info('Using default configuration (no configuration file ' 'provided)') log.info('Current PyPhi configuration:\n %s', str(self))
python
def log(self): """Log current settings.""" log.info('PyPhi v%s', __about__.__version__) if self._loaded_files: log.info('Loaded configuration from %s', self._loaded_files) else: log.info('Using default configuration (no configuration file ' 'provided)') log.info('Current PyPhi configuration:\n %s', str(self))
[ "def", "log", "(", "self", ")", ":", "log", ".", "info", "(", "'PyPhi v%s'", ",", "__about__", ".", "__version__", ")", "if", "self", ".", "_loaded_files", ":", "log", ".", "info", "(", "'Loaded configuration from %s'", ",", "self", ".", "_loaded_files", ")", "else", ":", "log", ".", "info", "(", "'Using default configuration (no configuration file '", "'provided)'", ")", "log", ".", "info", "(", "'Current PyPhi configuration:\\n %s'", ",", "str", "(", "self", ")", ")" ]
Log current settings.
[ "Log", "current", "settings", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/conf.py#L636-L644
train
wmayner/pyphi
pyphi/convert.py
be2le_state_by_state
def be2le_state_by_state(tpm): """Convert a state-by-state TPM from big-endian to little-endian or vice versa. Args: tpm (np.ndarray): A state-by-state TPM. Returns: np.ndarray: The state-by-state TPM in the other indexing format. Example: >>> tpm = np.arange(16).reshape([4, 4]) >>> be2le_state_by_state(tpm) array([[ 0., 1., 2., 3.], [ 8., 9., 10., 11.], [ 4., 5., 6., 7.], [12., 13., 14., 15.]]) """ le = np.empty(tpm.shape) N = tpm.shape[0] n = int(log2(N)) for i in range(N): le[i, :] = tpm[be2le(i, n), :] return le
python
def be2le_state_by_state(tpm): """Convert a state-by-state TPM from big-endian to little-endian or vice versa. Args: tpm (np.ndarray): A state-by-state TPM. Returns: np.ndarray: The state-by-state TPM in the other indexing format. Example: >>> tpm = np.arange(16).reshape([4, 4]) >>> be2le_state_by_state(tpm) array([[ 0., 1., 2., 3.], [ 8., 9., 10., 11.], [ 4., 5., 6., 7.], [12., 13., 14., 15.]]) """ le = np.empty(tpm.shape) N = tpm.shape[0] n = int(log2(N)) for i in range(N): le[i, :] = tpm[be2le(i, n), :] return le
[ "def", "be2le_state_by_state", "(", "tpm", ")", ":", "le", "=", "np", ".", "empty", "(", "tpm", ".", "shape", ")", "N", "=", "tpm", ".", "shape", "[", "0", "]", "n", "=", "int", "(", "log2", "(", "N", ")", ")", "for", "i", "in", "range", "(", "N", ")", ":", "le", "[", "i", ",", ":", "]", "=", "tpm", "[", "be2le", "(", "i", ",", "n", ")", ",", ":", "]", "return", "le" ]
Convert a state-by-state TPM from big-endian to little-endian or vice versa. Args: tpm (np.ndarray): A state-by-state TPM. Returns: np.ndarray: The state-by-state TPM in the other indexing format. Example: >>> tpm = np.arange(16).reshape([4, 4]) >>> be2le_state_by_state(tpm) array([[ 0., 1., 2., 3.], [ 8., 9., 10., 11.], [ 4., 5., 6., 7.], [12., 13., 14., 15.]])
[ "Convert", "a", "state", "-", "by", "-", "state", "TPM", "from", "big", "-", "endian", "to", "little", "-", "endian", "or", "vice", "versa", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/convert.py#L147-L170
train
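The row permutation shown in the doctest above (rows 1 and 2 swap, rows 0 and 3 stay put) is an n-bit reversal of the row index. A tiny hypothetical stand-in for be2le makes that explicit; reverse_bits is illustrative, not pyphi's implementation:

def reverse_bits(i, n):
    # Reverse the n-bit binary representation of index i.
    return int(format(i, '0{}b'.format(n))[::-1], 2)

print([reverse_bits(i, 2) for i in range(4)])  # [0, 2, 1, 3] -- rows 1 and 2 swap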
wmayner/pyphi
pyphi/convert.py
to_multidimensional
def to_multidimensional(tpm): """Reshape a state-by-node TPM to the multidimensional form. See documentation for the |Network| object for more information on TPM formats. """ # Cast to np.array. tpm = np.array(tpm) # Get the number of nodes. N = tpm.shape[-1] # Reshape. We use Fortran ordering here so that the rows use the # little-endian convention (least-significant bits correspond to low-index # nodes). Note that this does not change the actual memory layout (C- or # Fortran-contiguous), so there is no performance loss. return tpm.reshape([2] * N + [N], order="F").astype(float)
python
def to_multidimensional(tpm): """Reshape a state-by-node TPM to the multidimensional form. See documentation for the |Network| object for more information on TPM formats. """ # Cast to np.array. tpm = np.array(tpm) # Get the number of nodes. N = tpm.shape[-1] # Reshape. We use Fortran ordering here so that the rows use the # little-endian convention (least-significant bits correspond to low-index # nodes). Note that this does not change the actual memory layout (C- or # Fortran-contiguous), so there is no performance loss. return tpm.reshape([2] * N + [N], order="F").astype(float)
[ "def", "to_multidimensional", "(", "tpm", ")", ":", "# Cast to np.array.", "tpm", "=", "np", ".", "array", "(", "tpm", ")", "# Get the number of nodes.", "N", "=", "tpm", ".", "shape", "[", "-", "1", "]", "# Reshape. We use Fortran ordering here so that the rows use the", "# little-endian convention (least-significant bits correspond to low-index", "# nodes). Note that this does not change the actual memory layout (C- or", "# Fortran-contiguous), so there is no performance loss.", "return", "tpm", ".", "reshape", "(", "[", "2", "]", "*", "N", "+", "[", "N", "]", ",", "order", "=", "\"F\"", ")", ".", "astype", "(", "float", ")" ]
Reshape a state-by-node TPM to the multidimensional form. See documentation for the |Network| object for more information on TPM formats.
[ "Reshape", "a", "state", "-", "by", "-", "node", "TPM", "to", "the", "multidimensional", "form", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/convert.py#L176-L190
train
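A usage sketch for to_multidimensional showing why the Fortran-ordered reshape matters: rows of the 2-D state-by-node TPM (indexed little-endian) become entries indexed directly by the previous state tuple. The 2-node TPM values are illustrative:

import numpy as np
from pyphi.convert import to_multidimensional  # as recorded above

# 4 rows (little-endian previous states), 2 columns (P(node is ON next)).
sbn = np.array([[0.0, 0.0],   # previous state (0, 0)
                [1.0, 0.0],   # previous state (1, 0) -- node 0 varies fastest
                [0.0, 1.0],   # previous state (0, 1)
                [1.0, 1.0]])  # previous state (1, 1)

md = to_multidimensional(sbn)
print(md.shape)    # (2, 2, 2)
print(md[(1, 0)])  # [1. 0.] -- same row as sbn[1], now indexed by the state tuple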
wmayner/pyphi
pyphi/convert.py
state_by_state2state_by_node
def state_by_state2state_by_node(tpm): """Convert a state-by-state TPM to a state-by-node TPM. .. danger:: Many nondeterministic state-by-state TPMs can be represented by a single a state-by-state TPM. However, the mapping can be made to be one-to-one if we assume the state-by-state TPM is conditionally independent, as this function does. **If the given TPM is not conditionally independent, the conditional dependencies will be silently lost.** .. note:: The indices of the rows and columns of the state-by-state TPM are assumed to follow the little-endian convention. The indices of the rows of the resulting state-by-node TPM also follow the little-endian convention. See the documentation on PyPhi the :ref:`tpm-conventions` more information. Args: tpm (list[list] or np.ndarray): A square state-by-state TPM with row and column indices following the little-endian convention. Returns: np.ndarray: A state-by-node TPM, with row indices following the little-endian convention. Example: >>> tpm = np.array([[0.5, 0.5, 0.0, 0.0], ... [0.0, 1.0, 0.0, 0.0], ... [0.0, 0.2, 0.0, 0.8], ... [0.0, 0.3, 0.7, 0.0]]) >>> state_by_state2state_by_node(tpm) array([[[0.5, 0. ], [1. , 0.8]], <BLANKLINE> [[1. , 0. ], [0.3, 0.7]]]) """ # Cast to np.array. tpm = np.array(tpm) # Get the number of states from the length of one side of the TPM. S = tpm.shape[-1] # Get the number of nodes from the number of states. N = int(log2(S)) # Initialize the new state-by node TPM. sbn_tpm = np.zeros(([2] * N + [N])) # Map indices to state-tuples with the little-endian convention. states = {i: le_index2state(i, N) for i in range(S)} # Get an array for each node with 1 in positions that correspond to that # node being on in the next state, and a 0 otherwise. node_on = np.array([[states[i][n] for i in range(S)] for n in range(N)]) on_probabilities = [tpm * node_on[n] for n in range(N)] for i, state in states.items(): # Get the probability of each node being on given the previous state i, # i.e., a row of the state-by-node TPM. # Assign that row to the ith state in the state-by-node TPM. sbn_tpm[state] = [np.sum(on_probabilities[n][i]) for n in range(N)] return sbn_tpm
python
def state_by_state2state_by_node(tpm): """Convert a state-by-state TPM to a state-by-node TPM. .. danger:: Many nondeterministic state-by-state TPMs can be represented by a single a state-by-state TPM. However, the mapping can be made to be one-to-one if we assume the state-by-state TPM is conditionally independent, as this function does. **If the given TPM is not conditionally independent, the conditional dependencies will be silently lost.** .. note:: The indices of the rows and columns of the state-by-state TPM are assumed to follow the little-endian convention. The indices of the rows of the resulting state-by-node TPM also follow the little-endian convention. See the documentation on PyPhi the :ref:`tpm-conventions` more information. Args: tpm (list[list] or np.ndarray): A square state-by-state TPM with row and column indices following the little-endian convention. Returns: np.ndarray: A state-by-node TPM, with row indices following the little-endian convention. Example: >>> tpm = np.array([[0.5, 0.5, 0.0, 0.0], ... [0.0, 1.0, 0.0, 0.0], ... [0.0, 0.2, 0.0, 0.8], ... [0.0, 0.3, 0.7, 0.0]]) >>> state_by_state2state_by_node(tpm) array([[[0.5, 0. ], [1. , 0.8]], <BLANKLINE> [[1. , 0. ], [0.3, 0.7]]]) """ # Cast to np.array. tpm = np.array(tpm) # Get the number of states from the length of one side of the TPM. S = tpm.shape[-1] # Get the number of nodes from the number of states. N = int(log2(S)) # Initialize the new state-by node TPM. sbn_tpm = np.zeros(([2] * N + [N])) # Map indices to state-tuples with the little-endian convention. states = {i: le_index2state(i, N) for i in range(S)} # Get an array for each node with 1 in positions that correspond to that # node being on in the next state, and a 0 otherwise. node_on = np.array([[states[i][n] for i in range(S)] for n in range(N)]) on_probabilities = [tpm * node_on[n] for n in range(N)] for i, state in states.items(): # Get the probability of each node being on given the previous state i, # i.e., a row of the state-by-node TPM. # Assign that row to the ith state in the state-by-node TPM. sbn_tpm[state] = [np.sum(on_probabilities[n][i]) for n in range(N)] return sbn_tpm
[ "def", "state_by_state2state_by_node", "(", "tpm", ")", ":", "# Cast to np.array.", "tpm", "=", "np", ".", "array", "(", "tpm", ")", "# Get the number of states from the length of one side of the TPM.", "S", "=", "tpm", ".", "shape", "[", "-", "1", "]", "# Get the number of nodes from the number of states.", "N", "=", "int", "(", "log2", "(", "S", ")", ")", "# Initialize the new state-by node TPM.", "sbn_tpm", "=", "np", ".", "zeros", "(", "(", "[", "2", "]", "*", "N", "+", "[", "N", "]", ")", ")", "# Map indices to state-tuples with the little-endian convention.", "states", "=", "{", "i", ":", "le_index2state", "(", "i", ",", "N", ")", "for", "i", "in", "range", "(", "S", ")", "}", "# Get an array for each node with 1 in positions that correspond to that", "# node being on in the next state, and a 0 otherwise.", "node_on", "=", "np", ".", "array", "(", "[", "[", "states", "[", "i", "]", "[", "n", "]", "for", "i", "in", "range", "(", "S", ")", "]", "for", "n", "in", "range", "(", "N", ")", "]", ")", "on_probabilities", "=", "[", "tpm", "*", "node_on", "[", "n", "]", "for", "n", "in", "range", "(", "N", ")", "]", "for", "i", ",", "state", "in", "states", ".", "items", "(", ")", ":", "# Get the probability of each node being on given the previous state i,", "# i.e., a row of the state-by-node TPM.", "# Assign that row to the ith state in the state-by-node TPM.", "sbn_tpm", "[", "state", "]", "=", "[", "np", ".", "sum", "(", "on_probabilities", "[", "n", "]", "[", "i", "]", ")", "for", "n", "in", "range", "(", "N", ")", "]", "return", "sbn_tpm" ]
Convert a state-by-state TPM to a state-by-node TPM. .. danger:: Many nondeterministic state-by-state TPMs can be represented by a single a state-by-state TPM. However, the mapping can be made to be one-to-one if we assume the state-by-state TPM is conditionally independent, as this function does. **If the given TPM is not conditionally independent, the conditional dependencies will be silently lost.** .. note:: The indices of the rows and columns of the state-by-state TPM are assumed to follow the little-endian convention. The indices of the rows of the resulting state-by-node TPM also follow the little-endian convention. See the documentation on PyPhi the :ref:`tpm-conventions` more information. Args: tpm (list[list] or np.ndarray): A square state-by-state TPM with row and column indices following the little-endian convention. Returns: np.ndarray: A state-by-node TPM, with row indices following the little-endian convention. Example: >>> tpm = np.array([[0.5, 0.5, 0.0, 0.0], ... [0.0, 1.0, 0.0, 0.0], ... [0.0, 0.2, 0.0, 0.8], ... [0.0, 0.3, 0.7, 0.0]]) >>> state_by_state2state_by_node(tpm) array([[[0.5, 0. ], [1. , 0.8]], <BLANKLINE> [[1. , 0. ], [0.3, 0.7]]])
[ "Convert", "a", "state", "-", "by", "-", "state", "TPM", "to", "a", "state", "-", "by", "-", "node", "TPM", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/convert.py#L207-L264
train
wmayner/pyphi
pyphi/convert.py
state_by_node2state_by_state
def state_by_node2state_by_state(tpm): """Convert a state-by-node TPM to a state-by-state TPM. .. important:: A nondeterministic state-by-node TPM can have more than one representation as a state-by-state TPM. However, the mapping can be made to be one-to-one if we assume the TPMs to be conditionally independent. Therefore, **this function returns the corresponding conditionally independent state-by-state TPM.** .. note:: The indices of the rows of the state-by-node TPM are assumed to follow the little-endian convention, while the indices of the columns follow the big-endian convention. The indices of the rows and columns of the resulting state-by-state TPM both follow the big-endian convention. See the documentation on PyPhi :ref:`tpm-conventions` for more info. Args: tpm (list[list] or np.ndarray): A state-by-node TPM with row indices following the little-endian convention and column indices following the big-endian convention. Returns: np.ndarray: A state-by-state TPM, with both row and column indices following the big-endian convention. >>> tpm = np.array([[1, 1, 0], ... [0, 0, 1], ... [0, 1, 1], ... [1, 0, 0], ... [0, 0, 1], ... [1, 0, 0], ... [1, 1, 1], ... [1, 0, 1]]) >>> state_by_node2state_by_state(tpm) array([[0., 0., 0., 1., 0., 0., 0., 0.], [0., 0., 0., 0., 1., 0., 0., 0.], [0., 0., 0., 0., 0., 0., 1., 0.], [0., 1., 0., 0., 0., 0., 0., 0.], [0., 0., 0., 0., 1., 0., 0., 0.], [0., 1., 0., 0., 0., 0., 0., 0.], [0., 0., 0., 0., 0., 0., 0., 1.], [0., 0., 0., 0., 0., 1., 0., 0.]]) """ # Cast to np.array. tpm = np.array(tpm) # Convert to multidimensional form. tpm = to_multidimensional(tpm) # Get the number of nodes from the last dimension of the TPM. N = tpm.shape[-1] # Get the number of states. S = 2**N # Initialize the state-by-state TPM. sbs_tpm = np.zeros((S, S)) if not np.any(np.logical_and(tpm < 1, tpm > 0)): # TPM is deterministic. for previous_state_index in range(S): # Use the little-endian convention to get the row and column # indices. previous_state = le_index2state(previous_state_index, N) current_state_index = state2le_index(tpm[previous_state]) sbs_tpm[previous_state_index, current_state_index] = 1 else: # TPM is nondeterministic. for previous_state_index in range(S): # Use the little-endian convention to get the row and column # indices. previous_state = le_index2state(previous_state_index, N) marginal_tpm = tpm[previous_state] for current_state_index in range(S): current_state = np.array( [i for i in le_index2state(current_state_index, N)]) sbs_tpm[previous_state_index, current_state_index] = ( np.prod(marginal_tpm[current_state == 1]) * np.prod(1 - marginal_tpm[current_state == 0])) return sbs_tpm
python
def state_by_node2state_by_state(tpm): """Convert a state-by-node TPM to a state-by-state TPM. .. important:: A nondeterministic state-by-node TPM can have more than one representation as a state-by-state TPM. However, the mapping can be made to be one-to-one if we assume the TPMs to be conditionally independent. Therefore, **this function returns the corresponding conditionally independent state-by-state TPM.** .. note:: The indices of the rows of the state-by-node TPM are assumed to follow the little-endian convention, while the indices of the columns follow the big-endian convention. The indices of the rows and columns of the resulting state-by-state TPM both follow the big-endian convention. See the documentation on PyPhi :ref:`tpm-conventions` for more info. Args: tpm (list[list] or np.ndarray): A state-by-node TPM with row indices following the little-endian convention and column indices following the big-endian convention. Returns: np.ndarray: A state-by-state TPM, with both row and column indices following the big-endian convention. >>> tpm = np.array([[1, 1, 0], ... [0, 0, 1], ... [0, 1, 1], ... [1, 0, 0], ... [0, 0, 1], ... [1, 0, 0], ... [1, 1, 1], ... [1, 0, 1]]) >>> state_by_node2state_by_state(tpm) array([[0., 0., 0., 1., 0., 0., 0., 0.], [0., 0., 0., 0., 1., 0., 0., 0.], [0., 0., 0., 0., 0., 0., 1., 0.], [0., 1., 0., 0., 0., 0., 0., 0.], [0., 0., 0., 0., 1., 0., 0., 0.], [0., 1., 0., 0., 0., 0., 0., 0.], [0., 0., 0., 0., 0., 0., 0., 1.], [0., 0., 0., 0., 0., 1., 0., 0.]]) """ # Cast to np.array. tpm = np.array(tpm) # Convert to multidimensional form. tpm = to_multidimensional(tpm) # Get the number of nodes from the last dimension of the TPM. N = tpm.shape[-1] # Get the number of states. S = 2**N # Initialize the state-by-state TPM. sbs_tpm = np.zeros((S, S)) if not np.any(np.logical_and(tpm < 1, tpm > 0)): # TPM is deterministic. for previous_state_index in range(S): # Use the little-endian convention to get the row and column # indices. previous_state = le_index2state(previous_state_index, N) current_state_index = state2le_index(tpm[previous_state]) sbs_tpm[previous_state_index, current_state_index] = 1 else: # TPM is nondeterministic. for previous_state_index in range(S): # Use the little-endian convention to get the row and column # indices. previous_state = le_index2state(previous_state_index, N) marginal_tpm = tpm[previous_state] for current_state_index in range(S): current_state = np.array( [i for i in le_index2state(current_state_index, N)]) sbs_tpm[previous_state_index, current_state_index] = ( np.prod(marginal_tpm[current_state == 1]) * np.prod(1 - marginal_tpm[current_state == 0])) return sbs_tpm
[ "def", "state_by_node2state_by_state", "(", "tpm", ")", ":", "# Cast to np.array.", "tpm", "=", "np", ".", "array", "(", "tpm", ")", "# Convert to multidimensional form.", "tpm", "=", "to_multidimensional", "(", "tpm", ")", "# Get the number of nodes from the last dimension of the TPM.", "N", "=", "tpm", ".", "shape", "[", "-", "1", "]", "# Get the number of states.", "S", "=", "2", "**", "N", "# Initialize the state-by-state TPM.", "sbs_tpm", "=", "np", ".", "zeros", "(", "(", "S", ",", "S", ")", ")", "if", "not", "np", ".", "any", "(", "np", ".", "logical_and", "(", "tpm", "<", "1", ",", "tpm", ">", "0", ")", ")", ":", "# TPM is deterministic.", "for", "previous_state_index", "in", "range", "(", "S", ")", ":", "# Use the little-endian convention to get the row and column", "# indices.", "previous_state", "=", "le_index2state", "(", "previous_state_index", ",", "N", ")", "current_state_index", "=", "state2le_index", "(", "tpm", "[", "previous_state", "]", ")", "sbs_tpm", "[", "previous_state_index", ",", "current_state_index", "]", "=", "1", "else", ":", "# TPM is nondeterministic.", "for", "previous_state_index", "in", "range", "(", "S", ")", ":", "# Use the little-endian convention to get the row and column", "# indices.", "previous_state", "=", "le_index2state", "(", "previous_state_index", ",", "N", ")", "marginal_tpm", "=", "tpm", "[", "previous_state", "]", "for", "current_state_index", "in", "range", "(", "S", ")", ":", "current_state", "=", "np", ".", "array", "(", "[", "i", "for", "i", "in", "le_index2state", "(", "current_state_index", ",", "N", ")", "]", ")", "sbs_tpm", "[", "previous_state_index", ",", "current_state_index", "]", "=", "(", "np", ".", "prod", "(", "marginal_tpm", "[", "current_state", "==", "1", "]", ")", "*", "np", ".", "prod", "(", "1", "-", "marginal_tpm", "[", "current_state", "==", "0", "]", ")", ")", "return", "sbs_tpm" ]
Convert a state-by-node TPM to a state-by-state TPM. .. important:: A nondeterministic state-by-node TPM can have more than one representation as a state-by-state TPM. However, the mapping can be made to be one-to-one if we assume the TPMs to be conditionally independent. Therefore, **this function returns the corresponding conditionally independent state-by-state TPM.** .. note:: The indices of the rows of the state-by-node TPM are assumed to follow the little-endian convention, while the indices of the columns follow the big-endian convention. The indices of the rows and columns of the resulting state-by-state TPM both follow the big-endian convention. See the documentation on PyPhi :ref:`tpm-conventions` for more info. Args: tpm (list[list] or np.ndarray): A state-by-node TPM with row indices following the little-endian convention and column indices following the big-endian convention. Returns: np.ndarray: A state-by-state TPM, with both row and column indices following the big-endian convention. >>> tpm = np.array([[1, 1, 0], ... [0, 0, 1], ... [0, 1, 1], ... [1, 0, 0], ... [0, 0, 1], ... [1, 0, 0], ... [1, 1, 1], ... [1, 0, 1]]) >>> state_by_node2state_by_state(tpm) array([[0., 0., 0., 1., 0., 0., 0., 0.], [0., 0., 0., 0., 1., 0., 0., 0.], [0., 0., 0., 0., 0., 0., 1., 0.], [0., 1., 0., 0., 0., 0., 0., 0.], [0., 0., 0., 0., 1., 0., 0., 0.], [0., 1., 0., 0., 0., 0., 0., 0.], [0., 0., 0., 0., 0., 0., 0., 1.], [0., 0., 0., 0., 0., 1., 0., 0.]])
[ "Convert", "a", "state", "-", "by", "-", "node", "TPM", "to", "a", "state", "-", "by", "-", "state", "TPM", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/convert.py#L270-L345
train
wmayner/pyphi
profiling/code_to_profile.py
load_json_network
def load_json_network(json_dict): """Load a network from a json file""" network = pyphi.Network.from_json(json_dict['network']) state = json_dict['state'] return (network, state)
python
def load_json_network(json_dict): """Load a network from a json file""" network = pyphi.Network.from_json(json_dict['network']) state = json_dict['state'] return (network, state)
[ "def", "load_json_network", "(", "json_dict", ")", ":", "network", "=", "pyphi", ".", "Network", ".", "from_json", "(", "json_dict", "[", "'network'", "]", ")", "state", "=", "json_dict", "[", "'state'", "]", "return", "(", "network", ",", "state", ")" ]
Load a network from a json file
[ "Load", "a", "network", "from", "a", "json", "file" ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/profiling/code_to_profile.py#L35-L39
train
wmayner/pyphi
profiling/code_to_profile.py
all_network_files
def all_network_files(): """All network files""" # TODO: list explicitly since some are missing? network_types = [ 'AND-circle', 'MAJ-specialized', 'MAJ-complete', 'iit-3.0-modular' ] network_sizes = range(5, 8) network_files = [] for n in network_sizes: for t in network_types: network_files.append('{}-{}'.format(n, t)) return network_files
python
def all_network_files(): """All network files""" # TODO: list explicitly since some are missing? network_types = [ 'AND-circle', 'MAJ-specialized', 'MAJ-complete', 'iit-3.0-modular' ] network_sizes = range(5, 8) network_files = [] for n in network_sizes: for t in network_types: network_files.append('{}-{}'.format(n, t)) return network_files
[ "def", "all_network_files", "(", ")", ":", "# TODO: list explicitly since some are missing?", "network_types", "=", "[", "'AND-circle'", ",", "'MAJ-specialized'", ",", "'MAJ-complete'", ",", "'iit-3.0-modular'", "]", "network_sizes", "=", "range", "(", "5", ",", "8", ")", "network_files", "=", "[", "]", "for", "n", "in", "network_sizes", ":", "for", "t", "in", "network_types", ":", "network_files", ".", "append", "(", "'{}-{}'", ".", "format", "(", "n", ",", "t", ")", ")", "return", "network_files" ]
All network files
[ "All", "network", "files" ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/profiling/code_to_profile.py#L42-L56
train
wmayner/pyphi
profiling/code_to_profile.py
profile_network
def profile_network(filename): """Profile a network. Saves PyPhi results, pstats, and logs to respective directories. """ log = logging.getLogger(filename) logfile = os.path.join(LOGS, filename + '.log') os.makedirs(os.path.dirname(logfile), exist_ok=True) handler = logging.FileHandler(logfile) handler.setFormatter(formatter) log.addHandler(handler) log.setLevel(logging.INFO) try: with open(os.path.join(NETWORKS, filename + '.json')) as f: network, state = load_json_network(json.load(f)) log.info('Profiling %s...', filename) log.info('PyPhi configuration:\n%s', pyphi.config.get_config_string()) start = time() pr = cProfile.Profile() pr.enable() results = tuple(pyphi.compute.complexes(network, state)) pr.disable() end = time() pstatsfile = os.path.join(PSTATS, filename + '.pstats') os.makedirs(os.path.dirname(pstatsfile), exist_ok=True) pr.dump_stats(pstatsfile) log.info('Finished in %i seconds.', end - start) resultfile = os.path.join(RESULTS, filename + '-results.pkl') os.makedirs(os.path.dirname(resultfile), exist_ok=True) with open(resultfile, 'wb') as f: pickle.dump(results, f) except Exception as e: log.error(e) raise e
python
def profile_network(filename): """Profile a network. Saves PyPhi results, pstats, and logs to respective directories. """ log = logging.getLogger(filename) logfile = os.path.join(LOGS, filename + '.log') os.makedirs(os.path.dirname(logfile), exist_ok=True) handler = logging.FileHandler(logfile) handler.setFormatter(formatter) log.addHandler(handler) log.setLevel(logging.INFO) try: with open(os.path.join(NETWORKS, filename + '.json')) as f: network, state = load_json_network(json.load(f)) log.info('Profiling %s...', filename) log.info('PyPhi configuration:\n%s', pyphi.config.get_config_string()) start = time() pr = cProfile.Profile() pr.enable() results = tuple(pyphi.compute.complexes(network, state)) pr.disable() end = time() pstatsfile = os.path.join(PSTATS, filename + '.pstats') os.makedirs(os.path.dirname(pstatsfile), exist_ok=True) pr.dump_stats(pstatsfile) log.info('Finished in %i seconds.', end - start) resultfile = os.path.join(RESULTS, filename + '-results.pkl') os.makedirs(os.path.dirname(resultfile), exist_ok=True) with open(resultfile, 'wb') as f: pickle.dump(results, f) except Exception as e: log.error(e) raise e
[ "def", "profile_network", "(", "filename", ")", ":", "log", "=", "logging", ".", "getLogger", "(", "filename", ")", "logfile", "=", "os", ".", "path", ".", "join", "(", "LOGS", ",", "filename", "+", "'.log'", ")", "os", ".", "makedirs", "(", "os", ".", "path", ".", "dirname", "(", "logfile", ")", ",", "exist_ok", "=", "True", ")", "handler", "=", "logging", ".", "FileHandler", "(", "logfile", ")", "handler", ".", "setFormatter", "(", "formatter", ")", "log", ".", "addHandler", "(", "handler", ")", "log", ".", "setLevel", "(", "logging", ".", "INFO", ")", "try", ":", "with", "open", "(", "os", ".", "path", ".", "join", "(", "NETWORKS", ",", "filename", "+", "'.json'", ")", ")", "as", "f", ":", "network", ",", "state", "=", "load_json_network", "(", "json", ".", "load", "(", "f", ")", ")", "log", ".", "info", "(", "'Profiling %s...'", ",", "filename", ")", "log", ".", "info", "(", "'PyPhi configuration:\\n%s'", ",", "pyphi", ".", "config", ".", "get_config_string", "(", ")", ")", "start", "=", "time", "(", ")", "pr", "=", "cProfile", ".", "Profile", "(", ")", "pr", ".", "enable", "(", ")", "results", "=", "tuple", "(", "pyphi", ".", "compute", ".", "complexes", "(", "network", ",", "state", ")", ")", "pr", ".", "disable", "(", ")", "end", "=", "time", "(", ")", "pstatsfile", "=", "os", ".", "path", ".", "join", "(", "PSTATS", ",", "filename", "+", "'.pstats'", ")", "os", ".", "makedirs", "(", "os", ".", "path", ".", "dirname", "(", "pstatsfile", ")", ",", "exist_ok", "=", "True", ")", "pr", ".", "dump_stats", "(", "pstatsfile", ")", "log", ".", "info", "(", "'Finished in %i seconds.'", ",", "end", "-", "start", ")", "resultfile", "=", "os", ".", "path", ".", "join", "(", "RESULTS", ",", "filename", "+", "'-results.pkl'", ")", "os", ".", "makedirs", "(", "os", ".", "path", ".", "dirname", "(", "resultfile", ")", ",", "exist_ok", "=", "True", ")", "with", "open", "(", "resultfile", ",", "'wb'", ")", "as", "f", ":", "pickle", ".", "dump", "(", "results", ",", "f", ")", "except", "Exception", "as", "e", ":", "log", ".", "error", "(", "e", ")", "raise", "e" ]
Profile a network. Saves PyPhi results, pstats, and logs to respective directories.
[ "Profile", "a", "network", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/profiling/code_to_profile.py#L59-L102
train
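A small follow-up sketch for profile_network: once the .pstats file has been written, the standard-library pstats module can summarize it without pyphi. The file path here is hypothetical and depends on the PSTATS directory the script was configured with.

    import pstats

    stats = pstats.Stats('pstats/5-AND-circle.pstats')  # hypothetical path
    stats.sort_stats('cumulative').print_stats(10)      # top 10 calls by cumulative time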
wmayner/pyphi
pyphi/timescale.py
run_tpm
def run_tpm(tpm, time_scale): """Iterate a TPM by the specified number of time steps. Args: tpm (np.ndarray): A state-by-node tpm. time_scale (int): The number of steps to run the tpm. Returns: np.ndarray """ sbs_tpm = convert.state_by_node2state_by_state(tpm) if sparse(tpm): tpm = sparse_time(sbs_tpm, time_scale) else: tpm = dense_time(sbs_tpm, time_scale) return convert.state_by_state2state_by_node(tpm)
python
def run_tpm(tpm, time_scale): """Iterate a TPM by the specified number of time steps. Args: tpm (np.ndarray): A state-by-node tpm. time_scale (int): The number of steps to run the tpm. Returns: np.ndarray """ sbs_tpm = convert.state_by_node2state_by_state(tpm) if sparse(tpm): tpm = sparse_time(sbs_tpm, time_scale) else: tpm = dense_time(sbs_tpm, time_scale) return convert.state_by_state2state_by_node(tpm)
[ "def", "run_tpm", "(", "tpm", ",", "time_scale", ")", ":", "sbs_tpm", "=", "convert", ".", "state_by_node2state_by_state", "(", "tpm", ")", "if", "sparse", "(", "tpm", ")", ":", "tpm", "=", "sparse_time", "(", "sbs_tpm", ",", "time_scale", ")", "else", ":", "tpm", "=", "dense_time", "(", "sbs_tpm", ",", "time_scale", ")", "return", "convert", ".", "state_by_state2state_by_node", "(", "tpm", ")" ]
Iterate a TPM by the specified number of time steps. Args: tpm (np.ndarray): A state-by-node tpm. time_scale (int): The number of steps to run the tpm. Returns: np.ndarray
[ "Iterate", "a", "TPM", "by", "the", "specified", "number", "of", "time", "steps", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/timescale.py#L28-L43
train
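A minimal sketch of run_tpm, assuming pyphi is installed; the deterministic 3-node TPM is taken from the basic_network record further down. The result is the state-by-node TPM of the system viewed at a time scale of two update steps.

    import numpy as np
    from pyphi.timescale import run_tpm

    tpm = np.array([
        [0, 0, 0],
        [0, 0, 1],
        [1, 0, 1],
        [1, 0, 0],
        [1, 1, 0],
        [1, 1, 1],
        [1, 1, 1],
        [1, 1, 0],
    ])
    tpm_two_steps = run_tpm(tpm, 2)   # TPM after two applications of the update rule
    print(tpm_two_steps.shape)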
wmayner/pyphi
pyphi/timescale.py
run_cm
def run_cm(cm, time_scale): """Iterate a connectivity matrix the specified number of steps. Args: cm (np.ndarray): A connectivity matrix. time_scale (int): The number of steps to run. Returns: np.ndarray: The connectivity matrix at the new timescale. """ cm = np.linalg.matrix_power(cm, time_scale) # Round non-unitary values back to 1 cm[cm > 1] = 1 return cm
python
def run_cm(cm, time_scale): """Iterate a connectivity matrix the specified number of steps. Args: cm (np.ndarray): A connectivity matrix. time_scale (int): The number of steps to run. Returns: np.ndarray: The connectivity matrix at the new timescale. """ cm = np.linalg.matrix_power(cm, time_scale) # Round non-unitary values back to 1 cm[cm > 1] = 1 return cm
[ "def", "run_cm", "(", "cm", ",", "time_scale", ")", ":", "cm", "=", "np", ".", "linalg", ".", "matrix_power", "(", "cm", ",", "time_scale", ")", "# Round non-unitary values back to 1", "cm", "[", "cm", ">", "1", "]", "=", "1", "return", "cm" ]
Iterate a connectivity matrix the specified number of steps. Args: cm (np.ndarray): A connectivity matrix. time_scale (int): The number of steps to run. Returns: np.ndarray: The connectivity matrix at the new timescale.
[ "Iterate", "a", "connectivity", "matrix", "the", "specified", "number", "of", "steps", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/timescale.py#L46-L59
train
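A worked sketch of run_cm on an invented 3-node chain A -> B -> C: after two time steps the only remaining connection is the two-step path from A to C. Assumes pyphi is installed (import path from the pyphi/timescale.py record above).

    import numpy as np
    from pyphi.timescale import run_cm

    cm = np.array([
        [0, 1, 0],   # A -> B
        [0, 0, 1],   # B -> C
        [0, 0, 0],
    ])
    print(run_cm(cm, 2))
    # [[0 0 1]
    #  [0 0 0]
    #  [0 0 0]]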
wmayner/pyphi
pyphi/compute/network.py
_reachable_subsystems
def _reachable_subsystems(network, indices, state): """A generator over all subsystems in a valid state.""" validate.is_network(network) # Return subsystems largest to smallest to optimize parallel # resource usage. for subset in utils.powerset(indices, nonempty=True, reverse=True): try: yield Subsystem(network, state, subset) except exceptions.StateUnreachableError: pass
python
def _reachable_subsystems(network, indices, state): """A generator over all subsystems in a valid state.""" validate.is_network(network) # Return subsystems largest to smallest to optimize parallel # resource usage. for subset in utils.powerset(indices, nonempty=True, reverse=True): try: yield Subsystem(network, state, subset) except exceptions.StateUnreachableError: pass
[ "def", "_reachable_subsystems", "(", "network", ",", "indices", ",", "state", ")", ":", "validate", ".", "is_network", "(", "network", ")", "# Return subsystems largest to smallest to optimize parallel", "# resource usage.", "for", "subset", "in", "utils", ".", "powerset", "(", "indices", ",", "nonempty", "=", "True", ",", "reverse", "=", "True", ")", ":", "try", ":", "yield", "Subsystem", "(", "network", ",", "state", ",", "subset", ")", "except", "exceptions", ".", "StateUnreachableError", ":", "pass" ]
A generator over all subsystems in a valid state.
[ "A", "generator", "over", "all", "subsystems", "in", "a", "valid", "state", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/compute/network.py#L21-L31
train
wmayner/pyphi
pyphi/compute/network.py
all_complexes
def all_complexes(network, state): """Return a generator for all complexes of the network. .. note:: Includes reducible, zero-|big_phi| complexes (which are not, strictly speaking, complexes at all). Args: network (Network): The |Network| of interest. state (tuple[int]): The state of the network (a binary tuple). Yields: SystemIrreducibilityAnalysis: A |SIA| for each |Subsystem| of the |Network|. """ engine = FindAllComplexes(subsystems(network, state)) return engine.run(config.PARALLEL_COMPLEX_EVALUATION)
python
def all_complexes(network, state): """Return a generator for all complexes of the network. .. note:: Includes reducible, zero-|big_phi| complexes (which are not, strictly speaking, complexes at all). Args: network (Network): The |Network| of interest. state (tuple[int]): The state of the network (a binary tuple). Yields: SystemIrreducibilityAnalysis: A |SIA| for each |Subsystem| of the |Network|. """ engine = FindAllComplexes(subsystems(network, state)) return engine.run(config.PARALLEL_COMPLEX_EVALUATION)
[ "def", "all_complexes", "(", "network", ",", "state", ")", ":", "engine", "=", "FindAllComplexes", "(", "subsystems", "(", "network", ",", "state", ")", ")", "return", "engine", ".", "run", "(", "config", ".", "PARALLEL_COMPLEX_EVALUATION", ")" ]
Return a generator for all complexes of the network. .. note:: Includes reducible, zero-|big_phi| complexes (which are not, strictly speaking, complexes at all). Args: network (Network): The |Network| of interest. state (tuple[int]): The state of the network (a binary tuple). Yields: SystemIrreducibilityAnalysis: A |SIA| for each |Subsystem| of the |Network|.
[ "Return", "a", "generator", "for", "all", "complexes", "of", "the", "network", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/compute/network.py#L93-L109
train
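A minimal usage sketch for all_complexes, assuming pyphi is installed and that the function is re-exported at pyphi.compute as in the PyPhi documentation. The network is the basic_network record further down, (1, 0, 0) is the state used for it in the docs, and .subsystem and .phi are assumed to be the relevant SystemIrreducibilityAnalysis attributes.

    import pyphi

    network = pyphi.examples.basic_network()
    state = (1, 0, 0)
    for sia in pyphi.compute.all_complexes(network, state):
        print(sia.subsystem, sia.phi)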
wmayner/pyphi
pyphi/compute/network.py
complexes
def complexes(network, state): """Return all irreducible complexes of the network. Args: network (Network): The |Network| of interest. state (tuple[int]): The state of the network (a binary tuple). Yields: SystemIrreducibilityAnalysis: A |SIA| for each |Subsystem| of the |Network|, excluding those with |big_phi = 0|. """ engine = FindIrreducibleComplexes(possible_complexes(network, state)) return engine.run(config.PARALLEL_COMPLEX_EVALUATION)
python
def complexes(network, state): """Return all irreducible complexes of the network. Args: network (Network): The |Network| of interest. state (tuple[int]): The state of the network (a binary tuple). Yields: SystemIrreducibilityAnalysis: A |SIA| for each |Subsystem| of the |Network|, excluding those with |big_phi = 0|. """ engine = FindIrreducibleComplexes(possible_complexes(network, state)) return engine.run(config.PARALLEL_COMPLEX_EVALUATION)
[ "def", "complexes", "(", "network", ",", "state", ")", ":", "engine", "=", "FindIrreducibleComplexes", "(", "possible_complexes", "(", "network", ",", "state", ")", ")", "return", "engine", ".", "run", "(", "config", ".", "PARALLEL_COMPLEX_EVALUATION", ")" ]
Return all irreducible complexes of the network. Args: network (Network): The |Network| of interest. state (tuple[int]): The state of the network (a binary tuple). Yields: SystemIrreducibilityAnalysis: A |SIA| for each |Subsystem| of the |Network|, excluding those with |big_phi = 0|.
[ "Return", "all", "irreducible", "complexes", "of", "the", "network", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/compute/network.py#L121-L133
train
wmayner/pyphi
pyphi/compute/network.py
major_complex
def major_complex(network, state): """Return the major complex of the network. Args: network (Network): The |Network| of interest. state (tuple[int]): The state of the network (a binary tuple). Returns: SystemIrreducibilityAnalysis: The |SIA| for the |Subsystem| with maximal |big_phi|. """ log.info('Calculating major complex...') result = complexes(network, state) if result: result = max(result) else: empty_subsystem = Subsystem(network, state, ()) result = _null_sia(empty_subsystem) log.info("Finished calculating major complex.") return result
python
def major_complex(network, state): """Return the major complex of the network. Args: network (Network): The |Network| of interest. state (tuple[int]): The state of the network (a binary tuple). Returns: SystemIrreducibilityAnalysis: The |SIA| for the |Subsystem| with maximal |big_phi|. """ log.info('Calculating major complex...') result = complexes(network, state) if result: result = max(result) else: empty_subsystem = Subsystem(network, state, ()) result = _null_sia(empty_subsystem) log.info("Finished calculating major complex.") return result
[ "def", "major_complex", "(", "network", ",", "state", ")", ":", "log", ".", "info", "(", "'Calculating major complex...'", ")", "result", "=", "complexes", "(", "network", ",", "state", ")", "if", "result", ":", "result", "=", "max", "(", "result", ")", "else", ":", "empty_subsystem", "=", "Subsystem", "(", "network", ",", "state", ",", "(", ")", ")", "result", "=", "_null_sia", "(", "empty_subsystem", ")", "log", ".", "info", "(", "\"Finished calculating major complex.\"", ")", "return", "result" ]
Return the major complex of the network. Args: network (Network): The |Network| of interest. state (tuple[int]): The state of the network (a binary tuple). Returns: SystemIrreducibilityAnalysis: The |SIA| for the |Subsystem| with maximal |big_phi|.
[ "Return", "the", "major", "complex", "of", "the", "network", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/compute/network.py#L136-L158
train
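A minimal usage sketch for major_complex, following the PyPhi getting-started pattern; same assumptions as the all_complexes sketch above. The PyPhi docs report a big-phi of 2.3125 for this network and state under default settings, but the exact value depends on configuration.

    import pyphi

    network = pyphi.examples.basic_network()
    state = (1, 0, 0)
    sia = pyphi.compute.major_complex(network, state)
    print(sia.subsystem)   # the major complex
    print(sia.phi)         # its integrated information (big phi)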
wmayner/pyphi
pyphi/compute/network.py
condensed
def condensed(network, state): """Return a list of maximal non-overlapping complexes. Args: network (Network): The |Network| of interest. state (tuple[int]): The state of the network (a binary tuple). Returns: list[SystemIrreducibilityAnalysis]: A list of |SIA| for non-overlapping complexes with maximal |big_phi| values. """ result = [] covered_nodes = set() for c in reversed(sorted(complexes(network, state))): if not any(n in covered_nodes for n in c.subsystem.node_indices): result.append(c) covered_nodes = covered_nodes | set(c.subsystem.node_indices) return result
python
def condensed(network, state): """Return a list of maximal non-overlapping complexes. Args: network (Network): The |Network| of interest. state (tuple[int]): The state of the network (a binary tuple). Returns: list[SystemIrreducibilityAnalysis]: A list of |SIA| for non-overlapping complexes with maximal |big_phi| values. """ result = [] covered_nodes = set() for c in reversed(sorted(complexes(network, state))): if not any(n in covered_nodes for n in c.subsystem.node_indices): result.append(c) covered_nodes = covered_nodes | set(c.subsystem.node_indices) return result
[ "def", "condensed", "(", "network", ",", "state", ")", ":", "result", "=", "[", "]", "covered_nodes", "=", "set", "(", ")", "for", "c", "in", "reversed", "(", "sorted", "(", "complexes", "(", "network", ",", "state", ")", ")", ")", ":", "if", "not", "any", "(", "n", "in", "covered_nodes", "for", "n", "in", "c", ".", "subsystem", ".", "node_indices", ")", ":", "result", ".", "append", "(", "c", ")", "covered_nodes", "=", "covered_nodes", "|", "set", "(", "c", ".", "subsystem", ".", "node_indices", ")", "return", "result" ]
Return a list of maximal non-overlapping complexes. Args: network (Network): The |Network| of interest. state (tuple[int]): The state of the network (a binary tuple). Returns: list[SystemIrreducibilityAnalysis]: A list of |SIA| for non-overlapping complexes with maximal |big_phi| values.
[ "Return", "a", "list", "of", "maximal", "non", "-", "overlapping", "complexes", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/compute/network.py#L161-L180
train
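A short sketch of condensed under the same assumptions: because the returned complexes do not overlap, their node indices can be read off directly.

    import pyphi

    network = pyphi.examples.basic_network()
    state = (1, 0, 0)
    for sia in pyphi.compute.condensed(network, state):
        print(sia.subsystem.node_indices, sia.phi)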
wmayner/pyphi
pyphi/examples.py
basic_network
def basic_network(cm=False): """A 3-node network of logic gates. Diagram:: +~~~~~~~~+ +~~~~>| A |<~~~~+ | | (OR) +~~~+ | | +~~~~~~~~+ | | | | | | v | +~+~~~~~~+ +~~~~~+~+ | B |<~~~~~~+ C | | (COPY) +~~~~~~>| (XOR) | +~~~~~~~~+ +~~~~~~~+ TPM: +----------------+---------------+ | Previous state | Current state | +----------------+---------------+ | A, B, C | A, B, C | +================+===============+ | 0, 0, 0 | 0, 0, 0 | +----------------+---------------+ | 1, 0, 0 | 0, 0, 1 | +----------------+---------------+ | 0, 1, 0 | 1, 0, 1 | +----------------+---------------+ | 1, 1, 0 | 1, 0, 0 | +----------------+---------------+ | 0, 0, 1 | 1, 1, 0 | +----------------+---------------+ | 1, 0, 1 | 1, 1, 1 | +----------------+---------------+ | 0, 1, 1 | 1, 1, 1 | +----------------+---------------+ | 1, 1, 1 | 1, 1, 0 | +----------------+---------------+ Connectivity matrix: +---+---+---+---+ | . | A | B | C | +---+---+---+---+ | A | 0 | 0 | 1 | +---+---+---+---+ | B | 1 | 0 | 1 | +---+---+---+---+ | C | 1 | 1 | 0 | +---+---+---+---+ .. note:: |CM[i][j] = 1| means that there is a directed edge |(i,j)| from node |i| to node |j| and |CM[i][j] = 0| means there is no edge from |i| to |j|. """ tpm = np.array([ [0, 0, 0], [0, 0, 1], [1, 0, 1], [1, 0, 0], [1, 1, 0], [1, 1, 1], [1, 1, 1], [1, 1, 0] ]) if cm is False: cm = np.array([ [0, 0, 1], [1, 0, 1], [1, 1, 0] ]) else: cm = None return Network(tpm, cm=cm, node_labels=LABELS[:tpm.shape[1]])
python
def basic_network(cm=False): """A 3-node network of logic gates. Diagram:: +~~~~~~~~+ +~~~~>| A |<~~~~+ | | (OR) +~~~+ | | +~~~~~~~~+ | | | | | | v | +~+~~~~~~+ +~~~~~+~+ | B |<~~~~~~+ C | | (COPY) +~~~~~~>| (XOR) | +~~~~~~~~+ +~~~~~~~+ TPM: +----------------+---------------+ | Previous state | Current state | +----------------+---------------+ | A, B, C | A, B, C | +================+===============+ | 0, 0, 0 | 0, 0, 0 | +----------------+---------------+ | 1, 0, 0 | 0, 0, 1 | +----------------+---------------+ | 0, 1, 0 | 1, 0, 1 | +----------------+---------------+ | 1, 1, 0 | 1, 0, 0 | +----------------+---------------+ | 0, 0, 1 | 1, 1, 0 | +----------------+---------------+ | 1, 0, 1 | 1, 1, 1 | +----------------+---------------+ | 0, 1, 1 | 1, 1, 1 | +----------------+---------------+ | 1, 1, 1 | 1, 1, 0 | +----------------+---------------+ Connectivity matrix: +---+---+---+---+ | . | A | B | C | +---+---+---+---+ | A | 0 | 0 | 1 | +---+---+---+---+ | B | 1 | 0 | 1 | +---+---+---+---+ | C | 1 | 1 | 0 | +---+---+---+---+ .. note:: |CM[i][j] = 1| means that there is a directed edge |(i,j)| from node |i| to node |j| and |CM[i][j] = 0| means there is no edge from |i| to |j|. """ tpm = np.array([ [0, 0, 0], [0, 0, 1], [1, 0, 1], [1, 0, 0], [1, 1, 0], [1, 1, 1], [1, 1, 1], [1, 1, 0] ]) if cm is False: cm = np.array([ [0, 0, 1], [1, 0, 1], [1, 1, 0] ]) else: cm = None return Network(tpm, cm=cm, node_labels=LABELS[:tpm.shape[1]])
[ "def", "basic_network", "(", "cm", "=", "False", ")", ":", "tpm", "=", "np", ".", "array", "(", "[", "[", "0", ",", "0", ",", "0", "]", ",", "[", "0", ",", "0", ",", "1", "]", ",", "[", "1", ",", "0", ",", "1", "]", ",", "[", "1", ",", "0", ",", "0", "]", ",", "[", "1", ",", "1", ",", "0", "]", ",", "[", "1", ",", "1", ",", "1", "]", ",", "[", "1", ",", "1", ",", "1", "]", ",", "[", "1", ",", "1", ",", "0", "]", "]", ")", "if", "cm", "is", "False", ":", "cm", "=", "np", ".", "array", "(", "[", "[", "0", ",", "0", ",", "1", "]", ",", "[", "1", ",", "0", ",", "1", "]", ",", "[", "1", ",", "1", ",", "0", "]", "]", ")", "else", ":", "cm", "=", "None", "return", "Network", "(", "tpm", ",", "cm", "=", "cm", ",", "node_labels", "=", "LABELS", "[", ":", "tpm", ".", "shape", "[", "1", "]", "]", ")" ]
A 3-node network of logic gates. Diagram:: +~~~~~~~~+ +~~~~>| A |<~~~~+ | | (OR) +~~~+ | | +~~~~~~~~+ | | | | | | v | +~+~~~~~~+ +~~~~~+~+ | B |<~~~~~~+ C | | (COPY) +~~~~~~>| (XOR) | +~~~~~~~~+ +~~~~~~~+ TPM: +----------------+---------------+ | Previous state | Current state | +----------------+---------------+ | A, B, C | A, B, C | +================+===============+ | 0, 0, 0 | 0, 0, 0 | +----------------+---------------+ | 1, 0, 0 | 0, 0, 1 | +----------------+---------------+ | 0, 1, 0 | 1, 0, 1 | +----------------+---------------+ | 1, 1, 0 | 1, 0, 0 | +----------------+---------------+ | 0, 0, 1 | 1, 1, 0 | +----------------+---------------+ | 1, 0, 1 | 1, 1, 1 | +----------------+---------------+ | 0, 1, 1 | 1, 1, 1 | +----------------+---------------+ | 1, 1, 1 | 1, 1, 0 | +----------------+---------------+ Connectivity matrix: +---+---+---+---+ | . | A | B | C | +---+---+---+---+ | A | 0 | 0 | 1 | +---+---+---+---+ | B | 1 | 0 | 1 | +---+---+---+---+ | C | 1 | 1 | 0 | +---+---+---+---+ .. note:: |CM[i][j] = 1| means that there is a directed edge |(i,j)| from node |i| to node |j| and |CM[i][j] = 0| means there is no edge from |i| to |j|.
[ "A", "3", "-", "node", "network", "of", "logic", "gates", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/examples.py#L24-L99
train
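A minimal sketch showing how this example network is typically consumed: wrap the whole system in a Subsystem, the object the compute functions above operate on. Assumes pyphi exposes Subsystem at the top level and that Network has a node_indices attribute, as in the PyPhi docs; (1, 0, 0) is the documented state for this network.

    import pyphi

    network = pyphi.examples.basic_network()
    subsystem = pyphi.Subsystem(network, (1, 0, 0), network.node_indices)
    print(subsystem)   # e.g. Subsystem(A, B, C)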
wmayner/pyphi
pyphi/examples.py
basic_noisy_selfloop_network
def basic_noisy_selfloop_network(): """Based on the basic_network, but with added selfloops and noisy edges. Nodes perform deterministic functions of their inputs, but those inputs may be flipped (i.e. what should be a 0 becomes a 1, and vice versa) with probability epsilon (eps = 0.1 here). Diagram:: +~~+ | v +~~~~~~~~+ +~~~~>| A |<~~~~+ | | (OR) +~~~+ | | +~~~~~~~~+ | | | | | | v | +~+~~~~~~+ +~~~~~+~+ | B |<~~~~~~+ C | +>| (COPY) +~~~~~~>| (XOR) |<+ | +~~~~~~~~+ +~~~~~~~+ | | | | | +~~~+ +~~~+ """ tpm = np.array([ [0.271, 0.19, 0.244], [0.919, 0.19, 0.756], [0.919, 0.91, 0.756], [0.991, 0.91, 0.244], [0.919, 0.91, 0.756], [0.991, 0.91, 0.244], [0.991, 0.99, 0.244], [0.999, 0.99, 0.756] ]) cm = np.array([ [1, 0, 1], [1, 1, 1], [1, 1, 1] ]) return Network(tpm, cm=cm)
python
def basic_noisy_selfloop_network(): """Based on the basic_network, but with added selfloops and noisy edges. Nodes perform deterministic functions of their inputs, but those inputs may be flipped (i.e. what should be a 0 becomes a 1, and vice versa) with probability epsilon (eps = 0.1 here). Diagram:: +~~+ | v +~~~~~~~~+ +~~~~>| A |<~~~~+ | | (OR) +~~~+ | | +~~~~~~~~+ | | | | | | v | +~+~~~~~~+ +~~~~~+~+ | B |<~~~~~~+ C | +>| (COPY) +~~~~~~>| (XOR) |<+ | +~~~~~~~~+ +~~~~~~~+ | | | | | +~~~+ +~~~+ """ tpm = np.array([ [0.271, 0.19, 0.244], [0.919, 0.19, 0.756], [0.919, 0.91, 0.756], [0.991, 0.91, 0.244], [0.919, 0.91, 0.756], [0.991, 0.91, 0.244], [0.991, 0.99, 0.244], [0.999, 0.99, 0.756] ]) cm = np.array([ [1, 0, 1], [1, 1, 1], [1, 1, 1] ]) return Network(tpm, cm=cm)
[ "def", "basic_noisy_selfloop_network", "(", ")", ":", "tpm", "=", "np", ".", "array", "(", "[", "[", "0.271", ",", "0.19", ",", "0.244", "]", ",", "[", "0.919", ",", "0.19", ",", "0.756", "]", ",", "[", "0.919", ",", "0.91", ",", "0.756", "]", ",", "[", "0.991", ",", "0.91", ",", "0.244", "]", ",", "[", "0.919", ",", "0.91", ",", "0.756", "]", ",", "[", "0.991", ",", "0.91", ",", "0.244", "]", ",", "[", "0.991", ",", "0.99", ",", "0.244", "]", ",", "[", "0.999", ",", "0.99", ",", "0.756", "]", "]", ")", "cm", "=", "np", ".", "array", "(", "[", "[", "1", ",", "0", ",", "1", "]", ",", "[", "1", ",", "1", ",", "1", "]", ",", "[", "1", ",", "1", ",", "1", "]", "]", ")", "return", "Network", "(", "tpm", ",", "cm", "=", "cm", ")" ]
Based on the basic_network, but with added selfloops and noisy edges. Nodes perform deterministic functions of their inputs, but those inputs may be flipped (i.e. what should be a 0 becomes a 1, and vice versa) with probability epsilon (eps = 0.1 here). Diagram:: +~~+ | v +~~~~~~~~+ +~~~~>| A |<~~~~+ | | (OR) +~~~+ | | +~~~~~~~~+ | | | | | | v | +~+~~~~~~+ +~~~~~+~+ | B |<~~~~~~+ C | +>| (COPY) +~~~~~~>| (XOR) |<+ | +~~~~~~~~+ +~~~~~~~+ | | | | | +~~~+ +~~~+
[ "Based", "on", "the", "basic_network", "but", "with", "added", "selfloops", "and", "noisy", "edges", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/examples.py#L116-L158
train
wmayner/pyphi
pyphi/examples.py
residue_network
def residue_network(): """The network for the residue example. Current and previous state are all nodes OFF. Diagram:: +~~~~~~~+ +~~~~~~~+ | A | | B | +~~>| (AND) | | (AND) |<~~+ | +~~~~~~~+ +~~~~~~~+ | | ^ ^ | | | | | | +~~~~~+ +~~~~~+ | | | | | +~~~+~~~+ +~+~~~+~+ +~~~+~~~+ | C | | D | | E | | | | | | | +~~~~~~~+ +~~~~~~~+ +~~~~~~~+ Connectivity matrix: +---+---+---+---+---+---+ | . | A | B | C | D | E | +---+---+---+---+---+---+ | A | 0 | 0 | 0 | 0 | 0 | +---+---+---+---+---+---+ | B | 0 | 0 | 0 | 0 | 0 | +---+---+---+---+---+---+ | C | 1 | 0 | 0 | 0 | 0 | +---+---+---+---+---+---+ | D | 1 | 1 | 0 | 0 | 0 | +---+---+---+---+---+---+ | E | 0 | 1 | 0 | 0 | 0 | +---+---+---+---+---+---+ """ tpm = np.array([ [int(s) for s in bin(x)[2:].zfill(5)[::-1]] for x in range(32) ]) tpm[np.where(np.sum(tpm[0:, 2:4], 1) == 2), 0] = 1 tpm[np.where(np.sum(tpm[0:, 3:5], 1) == 2), 1] = 1 tpm[np.where(np.sum(tpm[0:, 2:4], 1) < 2), 0] = 0 tpm[np.where(np.sum(tpm[0:, 3:5], 1) < 2), 1] = 0 cm = np.zeros((5, 5)) cm[2:4, 0] = 1 cm[3:, 1] = 1 return Network(tpm, cm=cm, node_labels=LABELS[:tpm.shape[1]])
python
def residue_network(): """The network for the residue example. Current and previous state are all nodes OFF. Diagram:: +~~~~~~~+ +~~~~~~~+ | A | | B | +~~>| (AND) | | (AND) |<~~+ | +~~~~~~~+ +~~~~~~~+ | | ^ ^ | | | | | | +~~~~~+ +~~~~~+ | | | | | +~~~+~~~+ +~+~~~+~+ +~~~+~~~+ | C | | D | | E | | | | | | | +~~~~~~~+ +~~~~~~~+ +~~~~~~~+ Connectivity matrix: +---+---+---+---+---+---+ | . | A | B | C | D | E | +---+---+---+---+---+---+ | A | 0 | 0 | 0 | 0 | 0 | +---+---+---+---+---+---+ | B | 0 | 0 | 0 | 0 | 0 | +---+---+---+---+---+---+ | C | 1 | 0 | 0 | 0 | 0 | +---+---+---+---+---+---+ | D | 1 | 1 | 0 | 0 | 0 | +---+---+---+---+---+---+ | E | 0 | 1 | 0 | 0 | 0 | +---+---+---+---+---+---+ """ tpm = np.array([ [int(s) for s in bin(x)[2:].zfill(5)[::-1]] for x in range(32) ]) tpm[np.where(np.sum(tpm[0:, 2:4], 1) == 2), 0] = 1 tpm[np.where(np.sum(tpm[0:, 3:5], 1) == 2), 1] = 1 tpm[np.where(np.sum(tpm[0:, 2:4], 1) < 2), 0] = 0 tpm[np.where(np.sum(tpm[0:, 3:5], 1) < 2), 1] = 0 cm = np.zeros((5, 5)) cm[2:4, 0] = 1 cm[3:, 1] = 1 return Network(tpm, cm=cm, node_labels=LABELS[:tpm.shape[1]])
[ "def", "residue_network", "(", ")", ":", "tpm", "=", "np", ".", "array", "(", "[", "[", "int", "(", "s", ")", "for", "s", "in", "bin", "(", "x", ")", "[", "2", ":", "]", ".", "zfill", "(", "5", ")", "[", ":", ":", "-", "1", "]", "]", "for", "x", "in", "range", "(", "32", ")", "]", ")", "tpm", "[", "np", ".", "where", "(", "np", ".", "sum", "(", "tpm", "[", "0", ":", ",", "2", ":", "4", "]", ",", "1", ")", "==", "2", ")", ",", "0", "]", "=", "1", "tpm", "[", "np", ".", "where", "(", "np", ".", "sum", "(", "tpm", "[", "0", ":", ",", "3", ":", "5", "]", ",", "1", ")", "==", "2", ")", ",", "1", "]", "=", "1", "tpm", "[", "np", ".", "where", "(", "np", ".", "sum", "(", "tpm", "[", "0", ":", ",", "2", ":", "4", "]", ",", "1", ")", "<", "2", ")", ",", "0", "]", "=", "0", "tpm", "[", "np", ".", "where", "(", "np", ".", "sum", "(", "tpm", "[", "0", ":", ",", "3", ":", "5", "]", ",", "1", ")", "<", "2", ")", ",", "1", "]", "=", "0", "cm", "=", "np", ".", "zeros", "(", "(", "5", ",", "5", ")", ")", "cm", "[", "2", ":", "4", ",", "0", "]", "=", "1", "cm", "[", "3", ":", ",", "1", "]", "=", "1", "return", "Network", "(", "tpm", ",", "cm", "=", "cm", ",", "node_labels", "=", "LABELS", "[", ":", "tpm", ".", "shape", "[", "1", "]", "]", ")" ]
The network for the residue example. Current and previous state are all nodes OFF. Diagram:: +~~~~~~~+ +~~~~~~~+ | A | | B | +~~>| (AND) | | (AND) |<~~+ | +~~~~~~~+ +~~~~~~~+ | | ^ ^ | | | | | | +~~~~~+ +~~~~~+ | | | | | +~~~+~~~+ +~+~~~+~+ +~~~+~~~+ | C | | D | | E | | | | | | | +~~~~~~~+ +~~~~~~~+ +~~~~~~~+ Connectivity matrix: +---+---+---+---+---+---+ | . | A | B | C | D | E | +---+---+---+---+---+---+ | A | 0 | 0 | 0 | 0 | 0 | +---+---+---+---+---+---+ | B | 0 | 0 | 0 | 0 | 0 | +---+---+---+---+---+---+ | C | 1 | 0 | 0 | 0 | 0 | +---+---+---+---+---+---+ | D | 1 | 1 | 0 | 0 | 0 | +---+---+---+---+---+---+ | E | 0 | 1 | 0 | 0 | 0 | +---+---+---+---+---+---+
[ "The", "network", "for", "the", "residue", "example", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/examples.py#L170-L218
train
wmayner/pyphi
pyphi/examples.py
propagation_delay_network
def propagation_delay_network(): """A version of the primary example from the IIT 3.0 paper with deterministic COPY gates on each connection. These copy gates essentially function as propagation delays on the signal between OR, AND and XOR gates from the original system. The current and previous states of the network are also selected to mimic the corresponding states from the IIT 3.0 paper. Diagram:: +----------+ +------------------+ C (COPY) +<----------------+ v +----------+ | +-------+-+ +-+-------+ | | +----------+ | | | A (OR) +--------------->+ B (COPY) +-------------->+ D (XOR) | | | +----------+ | | +-+-----+-+ +-+-----+-+ | ^ ^ | | | | | | | +----------+ +----------+ | | | +---+ H (COPY) +<----+ +---->+ F (COPY) +---+ | | +----------+ | | +----------+ | | | | | | +-+-----+-+ | | +----------+ | | +----------+ | +-------->+ I (COPY) +-->| G (AND) |<--+ E (COPY) +<--------+ +----------+ | | +----------+ +---------+ Connectivity matrix: +---+---+---+---+---+---+---+---+---+---+ | . | A | B | C | D | E | F | G | H | I | +---+---+---+---+---+---+---+---+---+---+ | A | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | +---+---+---+---+---+---+---+---+---+---+ | B | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | +---+---+---+---+---+---+---+---+---+---+ | C | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | +---+---+---+---+---+---+---+---+---+---+ | D | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | +---+---+---+---+---+---+---+---+---+---+ | E | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | +---+---+---+---+---+---+---+---+---+---+ | F | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | +---+---+---+---+---+---+---+---+---+---+ | G | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | +---+---+---+---+---+---+---+---+---+---+ | H | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | +---+---+---+---+---+---+---+---+---+---+ | I | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | +---+---+---+---+---+---+---+---+---+---+ States: In the IIT 3.0 paper example, the previous state of the system has only the XOR gate ON. For the propagation delay network, this corresponds to a state of ``(0, 0, 0, 1, 0, 0, 0, 0, 0)``. The current state of the IIT 3.0 example has only the OR gate ON. By advancing the propagation delay system two time steps, the current state ``(1, 0, 0, 0, 0, 0, 0, 0, 0)`` is achieved, with corresponding previous state ``(0, 0, 1, 0, 1, 0, 0, 0, 0)``. """ num_nodes = 9 num_states = 2 ** num_nodes tpm = np.zeros((num_states, num_nodes)) for previous_state_index, previous in enumerate(all_states(num_nodes)): current_state = [0 for i in range(num_nodes)] if previous[2] == 1 or previous[7] == 1: current_state[0] = 1 if previous[0] == 1: current_state[1] = 1 current_state[8] = 1 if previous[3] == 1: current_state[2] = 1 current_state[4] = 1 if previous[1] == 1 ^ previous[5] == 1: current_state[3] = 1 if previous[4] == 1 and previous[8] == 1: current_state[6] = 1 if previous[6] == 1: current_state[5] = 1 current_state[7] = 1 tpm[previous_state_index, :] = current_state cm = np.array([[0, 1, 0, 0, 0, 0, 0, 0, 1], [0, 0, 0, 1, 0, 0, 0, 0, 0], [1, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 1, 0, 1, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 1, 0, 0], [0, 0, 0, 1, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 1, 0, 1, 0], [1, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 1, 0, 0]]) return Network(tpm, cm=cm, node_labels=LABELS[:tpm.shape[1]])
python
def propagation_delay_network(): """A version of the primary example from the IIT 3.0 paper with deterministic COPY gates on each connection. These copy gates essentially function as propagation delays on the signal between OR, AND and XOR gates from the original system. The current and previous states of the network are also selected to mimic the corresponding states from the IIT 3.0 paper. Diagram:: +----------+ +------------------+ C (COPY) +<----------------+ v +----------+ | +-------+-+ +-+-------+ | | +----------+ | | | A (OR) +--------------->+ B (COPY) +-------------->+ D (XOR) | | | +----------+ | | +-+-----+-+ +-+-----+-+ | ^ ^ | | | | | | | +----------+ +----------+ | | | +---+ H (COPY) +<----+ +---->+ F (COPY) +---+ | | +----------+ | | +----------+ | | | | | | +-+-----+-+ | | +----------+ | | +----------+ | +-------->+ I (COPY) +-->| G (AND) |<--+ E (COPY) +<--------+ +----------+ | | +----------+ +---------+ Connectivity matrix: +---+---+---+---+---+---+---+---+---+---+ | . | A | B | C | D | E | F | G | H | I | +---+---+---+---+---+---+---+---+---+---+ | A | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | +---+---+---+---+---+---+---+---+---+---+ | B | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | +---+---+---+---+---+---+---+---+---+---+ | C | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | +---+---+---+---+---+---+---+---+---+---+ | D | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | +---+---+---+---+---+---+---+---+---+---+ | E | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | +---+---+---+---+---+---+---+---+---+---+ | F | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | +---+---+---+---+---+---+---+---+---+---+ | G | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | +---+---+---+---+---+---+---+---+---+---+ | H | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | +---+---+---+---+---+---+---+---+---+---+ | I | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | +---+---+---+---+---+---+---+---+---+---+ States: In the IIT 3.0 paper example, the previous state of the system has only the XOR gate ON. For the propagation delay network, this corresponds to a state of ``(0, 0, 0, 1, 0, 0, 0, 0, 0)``. The current state of the IIT 3.0 example has only the OR gate ON. By advancing the propagation delay system two time steps, the current state ``(1, 0, 0, 0, 0, 0, 0, 0, 0)`` is achieved, with corresponding previous state ``(0, 0, 1, 0, 1, 0, 0, 0, 0)``. """ num_nodes = 9 num_states = 2 ** num_nodes tpm = np.zeros((num_states, num_nodes)) for previous_state_index, previous in enumerate(all_states(num_nodes)): current_state = [0 for i in range(num_nodes)] if previous[2] == 1 or previous[7] == 1: current_state[0] = 1 if previous[0] == 1: current_state[1] = 1 current_state[8] = 1 if previous[3] == 1: current_state[2] = 1 current_state[4] = 1 if previous[1] == 1 ^ previous[5] == 1: current_state[3] = 1 if previous[4] == 1 and previous[8] == 1: current_state[6] = 1 if previous[6] == 1: current_state[5] = 1 current_state[7] = 1 tpm[previous_state_index, :] = current_state cm = np.array([[0, 1, 0, 0, 0, 0, 0, 0, 1], [0, 0, 0, 1, 0, 0, 0, 0, 0], [1, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 1, 0, 1, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 1, 0, 0], [0, 0, 0, 1, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 1, 0, 1, 0], [1, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 1, 0, 0]]) return Network(tpm, cm=cm, node_labels=LABELS[:tpm.shape[1]])
[ "def", "propagation_delay_network", "(", ")", ":", "num_nodes", "=", "9", "num_states", "=", "2", "**", "num_nodes", "tpm", "=", "np", ".", "zeros", "(", "(", "num_states", ",", "num_nodes", ")", ")", "for", "previous_state_index", ",", "previous", "in", "enumerate", "(", "all_states", "(", "num_nodes", ")", ")", ":", "current_state", "=", "[", "0", "for", "i", "in", "range", "(", "num_nodes", ")", "]", "if", "previous", "[", "2", "]", "==", "1", "or", "previous", "[", "7", "]", "==", "1", ":", "current_state", "[", "0", "]", "=", "1", "if", "previous", "[", "0", "]", "==", "1", ":", "current_state", "[", "1", "]", "=", "1", "current_state", "[", "8", "]", "=", "1", "if", "previous", "[", "3", "]", "==", "1", ":", "current_state", "[", "2", "]", "=", "1", "current_state", "[", "4", "]", "=", "1", "if", "previous", "[", "1", "]", "==", "1", "^", "previous", "[", "5", "]", "==", "1", ":", "current_state", "[", "3", "]", "=", "1", "if", "previous", "[", "4", "]", "==", "1", "and", "previous", "[", "8", "]", "==", "1", ":", "current_state", "[", "6", "]", "=", "1", "if", "previous", "[", "6", "]", "==", "1", ":", "current_state", "[", "5", "]", "=", "1", "current_state", "[", "7", "]", "=", "1", "tpm", "[", "previous_state_index", ",", ":", "]", "=", "current_state", "cm", "=", "np", ".", "array", "(", "[", "[", "0", ",", "1", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "1", "]", ",", "[", "0", ",", "0", ",", "0", ",", "1", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", "]", ",", "[", "1", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", "]", ",", "[", "0", ",", "0", ",", "1", ",", "0", ",", "1", ",", "0", ",", "0", ",", "0", ",", "0", "]", ",", "[", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "1", ",", "0", ",", "0", "]", ",", "[", "0", ",", "0", ",", "0", ",", "1", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", "]", ",", "[", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "1", ",", "0", ",", "1", ",", "0", "]", ",", "[", "1", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", "]", ",", "[", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "1", ",", "0", ",", "0", "]", "]", ")", "return", "Network", "(", "tpm", ",", "cm", "=", "cm", ",", "node_labels", "=", "LABELS", "[", ":", "tpm", ".", "shape", "[", "1", "]", "]", ")" ]
A version of the primary example from the IIT 3.0 paper with deterministic COPY gates on each connection. These copy gates essentially function as propagation delays on the signal between OR, AND and XOR gates from the original system. The current and previous states of the network are also selected to mimic the corresponding states from the IIT 3.0 paper. Diagram:: +----------+ +------------------+ C (COPY) +<----------------+ v +----------+ | +-------+-+ +-+-------+ | | +----------+ | | | A (OR) +--------------->+ B (COPY) +-------------->+ D (XOR) | | | +----------+ | | +-+-----+-+ +-+-----+-+ | ^ ^ | | | | | | | +----------+ +----------+ | | | +---+ H (COPY) +<----+ +---->+ F (COPY) +---+ | | +----------+ | | +----------+ | | | | | | +-+-----+-+ | | +----------+ | | +----------+ | +-------->+ I (COPY) +-->| G (AND) |<--+ E (COPY) +<--------+ +----------+ | | +----------+ +---------+ Connectivity matrix: +---+---+---+---+---+---+---+---+---+---+ | . | A | B | C | D | E | F | G | H | I | +---+---+---+---+---+---+---+---+---+---+ | A | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | +---+---+---+---+---+---+---+---+---+---+ | B | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | +---+---+---+---+---+---+---+---+---+---+ | C | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | +---+---+---+---+---+---+---+---+---+---+ | D | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | +---+---+---+---+---+---+---+---+---+---+ | E | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | +---+---+---+---+---+---+---+---+---+---+ | F | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | +---+---+---+---+---+---+---+---+---+---+ | G | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | +---+---+---+---+---+---+---+---+---+---+ | H | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | +---+---+---+---+---+---+---+---+---+---+ | I | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | +---+---+---+---+---+---+---+---+---+---+ States: In the IIT 3.0 paper example, the previous state of the system has only the XOR gate ON. For the propagation delay network, this corresponds to a state of ``(0, 0, 0, 1, 0, 0, 0, 0, 0)``. The current state of the IIT 3.0 example has only the OR gate ON. By advancing the propagation delay system two time steps, the current state ``(1, 0, 0, 0, 0, 0, 0, 0, 0)`` is achieved, with corresponding previous state ``(0, 0, 1, 0, 1, 0, 0, 0, 0)``.
[ "A", "version", "of", "the", "primary", "example", "from", "the", "IIT", "3", ".", "0", "paper", "with", "deterministic", "COPY", "gates", "on", "each", "connection", ".", "These", "copy", "gates", "essentially", "function", "as", "propagation", "delays", "on", "the", "signal", "between", "OR", "AND", "and", "XOR", "gates", "from", "the", "original", "system", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/examples.py#L395-L496
train
wmayner/pyphi
pyphi/examples.py
macro_network
def macro_network(): """A network of micro elements which has greater integrated information after coarse graining to a macro scale. """ tpm = np.array([[0.3, 0.3, 0.3, 0.3], [0.3, 0.3, 0.3, 0.3], [0.3, 0.3, 0.3, 0.3], [0.3, 0.3, 1.0, 1.0], [0.3, 0.3, 0.3, 0.3], [0.3, 0.3, 0.3, 0.3], [0.3, 0.3, 0.3, 0.3], [0.3, 0.3, 1.0, 1.0], [0.3, 0.3, 0.3, 0.3], [0.3, 0.3, 0.3, 0.3], [0.3, 0.3, 0.3, 0.3], [0.3, 0.3, 1.0, 1.0], [1.0, 1.0, 0.3, 0.3], [1.0, 1.0, 0.3, 0.3], [1.0, 1.0, 0.3, 0.3], [1.0, 1.0, 1.0, 1.0]]) return Network(tpm, node_labels=LABELS[:tpm.shape[1]])
python
def macro_network(): """A network of micro elements which has greater integrated information after coarse graining to a macro scale. """ tpm = np.array([[0.3, 0.3, 0.3, 0.3], [0.3, 0.3, 0.3, 0.3], [0.3, 0.3, 0.3, 0.3], [0.3, 0.3, 1.0, 1.0], [0.3, 0.3, 0.3, 0.3], [0.3, 0.3, 0.3, 0.3], [0.3, 0.3, 0.3, 0.3], [0.3, 0.3, 1.0, 1.0], [0.3, 0.3, 0.3, 0.3], [0.3, 0.3, 0.3, 0.3], [0.3, 0.3, 0.3, 0.3], [0.3, 0.3, 1.0, 1.0], [1.0, 1.0, 0.3, 0.3], [1.0, 1.0, 0.3, 0.3], [1.0, 1.0, 0.3, 0.3], [1.0, 1.0, 1.0, 1.0]]) return Network(tpm, node_labels=LABELS[:tpm.shape[1]])
[ "def", "macro_network", "(", ")", ":", "tpm", "=", "np", ".", "array", "(", "[", "[", "0.3", ",", "0.3", ",", "0.3", ",", "0.3", "]", ",", "[", "0.3", ",", "0.3", ",", "0.3", ",", "0.3", "]", ",", "[", "0.3", ",", "0.3", ",", "0.3", ",", "0.3", "]", ",", "[", "0.3", ",", "0.3", ",", "1.0", ",", "1.0", "]", ",", "[", "0.3", ",", "0.3", ",", "0.3", ",", "0.3", "]", ",", "[", "0.3", ",", "0.3", ",", "0.3", ",", "0.3", "]", ",", "[", "0.3", ",", "0.3", ",", "0.3", ",", "0.3", "]", ",", "[", "0.3", ",", "0.3", ",", "1.0", ",", "1.0", "]", ",", "[", "0.3", ",", "0.3", ",", "0.3", ",", "0.3", "]", ",", "[", "0.3", ",", "0.3", ",", "0.3", ",", "0.3", "]", ",", "[", "0.3", ",", "0.3", ",", "0.3", ",", "0.3", "]", ",", "[", "0.3", ",", "0.3", ",", "1.0", ",", "1.0", "]", ",", "[", "1.0", ",", "1.0", ",", "0.3", ",", "0.3", "]", ",", "[", "1.0", ",", "1.0", ",", "0.3", ",", "0.3", "]", ",", "[", "1.0", ",", "1.0", ",", "0.3", ",", "0.3", "]", ",", "[", "1.0", ",", "1.0", ",", "1.0", ",", "1.0", "]", "]", ")", "return", "Network", "(", "tpm", ",", "node_labels", "=", "LABELS", "[", ":", "tpm", ".", "shape", "[", "1", "]", "]", ")" ]
A network of micro elements which has greater integrated information after coarse graining to a macro scale.
[ "A", "network", "of", "micro", "elements", "which", "has", "greater", "integrated", "information", "after", "coarse", "graining", "to", "a", "macro", "scale", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/examples.py#L499-L519
train
wmayner/pyphi
pyphi/examples.py
blackbox_network
def blackbox_network(): """A micro-network to demonstrate blackboxing. Diagram:: +----------+ +-------------------->+ A (COPY) + <---------------+ | +----------+ | | +----------+ | | +-----------+ B (COPY) + <-------------+ | v v +----------+ | | +-+-----+-+ +-+-----+-+ | | | | | C (AND) | | F (AND) | | | | | +-+-----+-+ +-+-----+-+ | | ^ ^ | | +----------+ | | | +---------> + D (COPY) +---------------+ | | +----------+ | | +----------+ | +-------------------> + E (COPY) +-----------------+ +----------+ Connectivity Matrix: +---+---+---+---+---+---+---+ | . | A | B | C | D | E | F | +---+---+---+---+---+---+---+ | A | 0 | 0 | 1 | 0 | 0 | 0 | +---+---+---+---+---+---+---+ | B | 0 | 0 | 1 | 0 | 0 | 0 | +---+---+---+---+---+---+---+ | C | 0 | 0 | 0 | 1 | 1 | 0 | +---+---+---+---+---+---+---+ | D | 0 | 0 | 0 | 0 | 0 | 1 | +---+---+---+---+---+---+---+ | E | 0 | 0 | 0 | 0 | 0 | 1 | +---+---+---+---+---+---+---+ | F | 1 | 1 | 0 | 0 | 0 | 0 | +---+---+---+---+---+---+---+ In the documentation example, the state is (0, 0, 0, 0, 0, 0). """ num_nodes = 6 num_states = 2 ** num_nodes tpm = np.zeros((num_states, num_nodes)) for index, previous_state in enumerate(all_states(num_nodes)): current_state = [0 for i in range(num_nodes)] if previous_state[5] == 1: current_state[0] = 1 current_state[1] = 1 if previous_state[0] == 1 and previous_state[1]: current_state[2] = 1 if previous_state[2] == 1: current_state[3] = 1 current_state[4] = 1 if previous_state[3] == 1 and previous_state[4] == 1: current_state[5] = 1 tpm[index, :] = current_state cm = np.array([ [0, 0, 1, 0, 0, 0], [0, 0, 1, 0, 0, 0], [0, 0, 0, 1, 1, 0], [0, 0, 0, 0, 0, 1], [0, 0, 0, 0, 0, 1], [1, 1, 0, 0, 0, 0] ]) return Network(tpm, cm, node_labels=LABELS[:tpm.shape[1]])
python
def blackbox_network(): """A micro-network to demonstrate blackboxing. Diagram:: +----------+ +-------------------->+ A (COPY) + <---------------+ | +----------+ | | +----------+ | | +-----------+ B (COPY) + <-------------+ | v v +----------+ | | +-+-----+-+ +-+-----+-+ | | | | | C (AND) | | F (AND) | | | | | +-+-----+-+ +-+-----+-+ | | ^ ^ | | +----------+ | | | +---------> + D (COPY) +---------------+ | | +----------+ | | +----------+ | +-------------------> + E (COPY) +-----------------+ +----------+ Connectivity Matrix: +---+---+---+---+---+---+---+ | . | A | B | C | D | E | F | +---+---+---+---+---+---+---+ | A | 0 | 0 | 1 | 0 | 0 | 0 | +---+---+---+---+---+---+---+ | B | 0 | 0 | 1 | 0 | 0 | 0 | +---+---+---+---+---+---+---+ | C | 0 | 0 | 0 | 1 | 1 | 0 | +---+---+---+---+---+---+---+ | D | 0 | 0 | 0 | 0 | 0 | 1 | +---+---+---+---+---+---+---+ | E | 0 | 0 | 0 | 0 | 0 | 1 | +---+---+---+---+---+---+---+ | F | 1 | 1 | 0 | 0 | 0 | 0 | +---+---+---+---+---+---+---+ In the documentation example, the state is (0, 0, 0, 0, 0, 0). """ num_nodes = 6 num_states = 2 ** num_nodes tpm = np.zeros((num_states, num_nodes)) for index, previous_state in enumerate(all_states(num_nodes)): current_state = [0 for i in range(num_nodes)] if previous_state[5] == 1: current_state[0] = 1 current_state[1] = 1 if previous_state[0] == 1 and previous_state[1]: current_state[2] = 1 if previous_state[2] == 1: current_state[3] = 1 current_state[4] = 1 if previous_state[3] == 1 and previous_state[4] == 1: current_state[5] = 1 tpm[index, :] = current_state cm = np.array([ [0, 0, 1, 0, 0, 0], [0, 0, 1, 0, 0, 0], [0, 0, 0, 1, 1, 0], [0, 0, 0, 0, 0, 1], [0, 0, 0, 0, 0, 1], [1, 1, 0, 0, 0, 0] ]) return Network(tpm, cm, node_labels=LABELS[:tpm.shape[1]])
[ "def", "blackbox_network", "(", ")", ":", "num_nodes", "=", "6", "num_states", "=", "2", "**", "num_nodes", "tpm", "=", "np", ".", "zeros", "(", "(", "num_states", ",", "num_nodes", ")", ")", "for", "index", ",", "previous_state", "in", "enumerate", "(", "all_states", "(", "num_nodes", ")", ")", ":", "current_state", "=", "[", "0", "for", "i", "in", "range", "(", "num_nodes", ")", "]", "if", "previous_state", "[", "5", "]", "==", "1", ":", "current_state", "[", "0", "]", "=", "1", "current_state", "[", "1", "]", "=", "1", "if", "previous_state", "[", "0", "]", "==", "1", "and", "previous_state", "[", "1", "]", ":", "current_state", "[", "2", "]", "=", "1", "if", "previous_state", "[", "2", "]", "==", "1", ":", "current_state", "[", "3", "]", "=", "1", "current_state", "[", "4", "]", "=", "1", "if", "previous_state", "[", "3", "]", "==", "1", "and", "previous_state", "[", "4", "]", "==", "1", ":", "current_state", "[", "5", "]", "=", "1", "tpm", "[", "index", ",", ":", "]", "=", "current_state", "cm", "=", "np", ".", "array", "(", "[", "[", "0", ",", "0", ",", "1", ",", "0", ",", "0", ",", "0", "]", ",", "[", "0", ",", "0", ",", "1", ",", "0", ",", "0", ",", "0", "]", ",", "[", "0", ",", "0", ",", "0", ",", "1", ",", "1", ",", "0", "]", ",", "[", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "1", "]", ",", "[", "0", ",", "0", ",", "0", ",", "0", ",", "0", ",", "1", "]", ",", "[", "1", ",", "1", ",", "0", ",", "0", ",", "0", ",", "0", "]", "]", ")", "return", "Network", "(", "tpm", ",", "cm", ",", "node_labels", "=", "LABELS", "[", ":", "tpm", ".", "shape", "[", "1", "]", "]", ")" ]
A micro-network to demonstrate blackboxing. Diagram:: +----------+ +-------------------->+ A (COPY) + <---------------+ | +----------+ | | +----------+ | | +-----------+ B (COPY) + <-------------+ | v v +----------+ | | +-+-----+-+ +-+-----+-+ | | | | | C (AND) | | F (AND) | | | | | +-+-----+-+ +-+-----+-+ | | ^ ^ | | +----------+ | | | +---------> + D (COPY) +---------------+ | | +----------+ | | +----------+ | +-------------------> + E (COPY) +-----------------+ +----------+ Connectivity Matrix: +---+---+---+---+---+---+---+ | . | A | B | C | D | E | F | +---+---+---+---+---+---+---+ | A | 0 | 0 | 1 | 0 | 0 | 0 | +---+---+---+---+---+---+---+ | B | 0 | 0 | 1 | 0 | 0 | 0 | +---+---+---+---+---+---+---+ | C | 0 | 0 | 0 | 1 | 1 | 0 | +---+---+---+---+---+---+---+ | D | 0 | 0 | 0 | 0 | 0 | 1 | +---+---+---+---+---+---+---+ | E | 0 | 0 | 0 | 0 | 0 | 1 | +---+---+---+---+---+---+---+ | F | 1 | 1 | 0 | 0 | 0 | 0 | +---+---+---+---+---+---+---+ In the documentation example, the state is (0, 0, 0, 0, 0, 0).
[ "A", "micro", "-", "network", "to", "demonstrate", "blackboxing", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/examples.py#L531-L603
train
wmayner/pyphi
pyphi/examples.py
actual_causation
def actual_causation(): """The actual causation example network, consisting of an ``OR`` and ``AND`` gate with self-loops. """ tpm = np.array([ [1, 0, 0, 0], [0, 1, 0, 0], [0, 1, 0, 0], [0, 0, 0, 1] ]) cm = np.array([ [1, 1], [1, 1] ]) return Network(tpm, cm, node_labels=('OR', 'AND'))
python
def actual_causation(): """The actual causation example network, consisting of an ``OR`` and ``AND`` gate with self-loops. """ tpm = np.array([ [1, 0, 0, 0], [0, 1, 0, 0], [0, 1, 0, 0], [0, 0, 0, 1] ]) cm = np.array([ [1, 1], [1, 1] ]) return Network(tpm, cm, node_labels=('OR', 'AND'))
[ "def", "actual_causation", "(", ")", ":", "tpm", "=", "np", ".", "array", "(", "[", "[", "1", ",", "0", ",", "0", ",", "0", "]", ",", "[", "0", ",", "1", ",", "0", ",", "0", "]", ",", "[", "0", ",", "1", ",", "0", ",", "0", "]", ",", "[", "0", ",", "0", ",", "0", ",", "1", "]", "]", ")", "cm", "=", "np", ".", "array", "(", "[", "[", "1", ",", "1", "]", ",", "[", "1", ",", "1", "]", "]", ")", "return", "Network", "(", "tpm", ",", "cm", ",", "node_labels", "=", "(", "'OR'", ",", "'AND'", ")", ")" ]
The actual causation example network, consisting of an ``OR`` and ``AND`` gate with self-loops.
[ "The", "actual", "causation", "example", "network", "consisting", "of", "an", "OR", "and", "AND", "gate", "with", "self", "-", "loops", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/examples.py#L1063-L1077
train
wmayner/pyphi
pyphi/examples.py
prevention
def prevention(): """The |Transition| for the prevention example from Actual Causation Figure 5D. """ tpm = np.array([ [0.5, 0.5, 1], [0.5, 0.5, 0], [0.5, 0.5, 1], [0.5, 0.5, 1], [0.5, 0.5, 1], [0.5, 0.5, 0], [0.5, 0.5, 1], [0.5, 0.5, 1] ]) cm = np.array([ [0, 0, 1], [0, 0, 1], [0, 0, 0] ]) network = Network(tpm, cm, node_labels=['A', 'B', 'F']) x_state = (1, 1, 1) y_state = (1, 1, 1) return Transition(network, x_state, y_state, (0, 1), (2,))
python
def prevention(): """The |Transition| for the prevention example from Actual Causation Figure 5D. """ tpm = np.array([ [0.5, 0.5, 1], [0.5, 0.5, 0], [0.5, 0.5, 1], [0.5, 0.5, 1], [0.5, 0.5, 1], [0.5, 0.5, 0], [0.5, 0.5, 1], [0.5, 0.5, 1] ]) cm = np.array([ [0, 0, 1], [0, 0, 1], [0, 0, 0] ]) network = Network(tpm, cm, node_labels=['A', 'B', 'F']) x_state = (1, 1, 1) y_state = (1, 1, 1) return Transition(network, x_state, y_state, (0, 1), (2,))
[ "def", "prevention", "(", ")", ":", "tpm", "=", "np", ".", "array", "(", "[", "[", "0.5", ",", "0.5", ",", "1", "]", ",", "[", "0.5", ",", "0.5", ",", "0", "]", ",", "[", "0.5", ",", "0.5", ",", "1", "]", ",", "[", "0.5", ",", "0.5", ",", "1", "]", ",", "[", "0.5", ",", "0.5", ",", "1", "]", ",", "[", "0.5", ",", "0.5", ",", "0", "]", ",", "[", "0.5", ",", "0.5", ",", "1", "]", ",", "[", "0.5", ",", "0.5", ",", "1", "]", "]", ")", "cm", "=", "np", ".", "array", "(", "[", "[", "0", ",", "0", ",", "1", "]", ",", "[", "0", ",", "0", ",", "1", "]", ",", "[", "0", ",", "0", ",", "0", "]", "]", ")", "network", "=", "Network", "(", "tpm", ",", "cm", ",", "node_labels", "=", "[", "'A'", ",", "'B'", ",", "'F'", "]", ")", "x_state", "=", "(", "1", ",", "1", ",", "1", ")", "y_state", "=", "(", "1", ",", "1", ",", "1", ")", "return", "Transition", "(", "network", ",", "x_state", ",", "y_state", ",", "(", "0", ",", "1", ")", ",", "(", "2", ",", ")", ")" ]
The |Transition| for the prevention example from Actual Causation Figure 5D.
[ "The", "|Transition|", "for", "the", "prevention", "example", "from", "Actual", "Causation", "Figure", "5D", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/examples.py#L1113-L1136
train
wmayner/pyphi
benchmarks/benchmarks/subsystem.py
clear_subsystem_caches
def clear_subsystem_caches(subsys): """Clear subsystem caches""" try: # New-style caches subsys._repertoire_cache.clear() subsys._mice_cache.clear() except TypeError: try: # Pre cache.clear() implementation subsys._repertoire_cache.cache = {} subsys._mice_cache.cache = {} except AttributeError: # Old school, pre cache refactor subsys._repertoire_cache = {} subsys._repertoire_cache_info = [0, 0] subsys._mice_cache = {}
python
def clear_subsystem_caches(subsys): """Clear subsystem caches""" try: # New-style caches subsys._repertoire_cache.clear() subsys._mice_cache.clear() except TypeError: try: # Pre cache.clear() implementation subsys._repertoire_cache.cache = {} subsys._mice_cache.cache = {} except AttributeError: # Old school, pre cache refactor subsys._repertoire_cache = {} subsys._repertoire_cache_info = [0, 0] subsys._mice_cache = {}
[ "def", "clear_subsystem_caches", "(", "subsys", ")", ":", "try", ":", "# New-style caches", "subsys", ".", "_repertoire_cache", ".", "clear", "(", ")", "subsys", ".", "_mice_cache", ".", "clear", "(", ")", "except", "TypeError", ":", "try", ":", "# Pre cache.clear() implementation", "subsys", ".", "_repertoire_cache", ".", "cache", "=", "{", "}", "subsys", ".", "_mice_cache", ".", "cache", "=", "{", "}", "except", "AttributeError", ":", "# Old school, pre cache refactor", "subsys", ".", "_repertoire_cache", "=", "{", "}", "subsys", ".", "_repertoire_cache_info", "=", "[", "0", ",", "0", "]", "subsys", ".", "_mice_cache", "=", "{", "}" ]
Clear subsystem caches
[ "Clear", "subsystem", "caches" ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/benchmarks/benchmarks/subsystem.py#L24-L39
train
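A hedged usage sketch for the benchmark helper above; pyphi.examples.basic_network and the Subsystem constructor are assumed from the public pyphi API and are not taken from this record.
import pyphi
# assuming clear_subsystem_caches is in scope (defined in the benchmark module above)

network = pyphi.examples.basic_network()
subsystem = pyphi.Subsystem(network, (1, 0, 0))   # example state; an assumption here
subsystem.cause_repertoire((0,), (0, 1, 2))       # populate the repertoire cache
clear_subsystem_caches(subsystem)                 # reset caches between benchmark runs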
wmayner/pyphi
pyphi/utils.py
all_states
def all_states(n, big_endian=False): """Return all binary states for a system. Args: n (int): The number of elements in the system. big_endian (bool): Whether to return the states in big-endian order instead of little-endian order. Yields: tuple[int]: The next state of an ``n``-element system, in little-endian order unless ``big_endian`` is ``True``. """ if n == 0: return for state in product((0, 1), repeat=n): if big_endian: yield state else: yield state[::-1]
python
def all_states(n, big_endian=False): """Return all binary states for a system. Args: n (int): The number of elements in the system. big_endian (bool): Whether to return the states in big-endian order instead of little-endian order. Yields: tuple[int]: The next state of an ``n``-element system, in little-endian order unless ``big_endian`` is ``True``. """ if n == 0: return for state in product((0, 1), repeat=n): if big_endian: yield state else: yield state[::-1]
[ "def", "all_states", "(", "n", ",", "big_endian", "=", "False", ")", ":", "if", "n", "==", "0", ":", "return", "for", "state", "in", "product", "(", "(", "0", ",", "1", ")", ",", "repeat", "=", "n", ")", ":", "if", "big_endian", ":", "yield", "state", "else", ":", "yield", "state", "[", ":", ":", "-", "1", "]" ]
Return all binary states for a system. Args: n (int): The number of elements in the system. big_endian (bool): Whether to return the states in big-endian order instead of little-endian order. Yields: tuple[int]: The next state of an ``n``-element system, in little-endian order unless ``big_endian`` is ``True``.
[ "Return", "all", "binary", "states", "for", "a", "system", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/utils.py#L27-L46
train
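A short sketch of the record above, assuming all_states is imported from pyphi.utils; it shows the default little-endian ordering against big_endian=True.
from pyphi.utils import all_states

print(list(all_states(2)))                    # [(0, 0), (1, 0), (0, 1), (1, 1)]  little-endian
print(list(all_states(2, big_endian=True)))   # [(0, 0), (0, 1), (1, 0), (1, 1)]
print(list(all_states(0)))                    # []  -- the n == 0 case yields nothing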
wmayner/pyphi
pyphi/utils.py
np_hash
def np_hash(a): """Return a hash of a NumPy array.""" if a is None: return hash(None) # Ensure that hashes are equal whatever the ordering in memory (C or # Fortran) a = np.ascontiguousarray(a) # Compute the digest and return a decimal int return int(hashlib.sha1(a.view(a.dtype)).hexdigest(), 16)
python
def np_hash(a): """Return a hash of a NumPy array.""" if a is None: return hash(None) # Ensure that hashes are equal whatever the ordering in memory (C or # Fortran) a = np.ascontiguousarray(a) # Compute the digest and return a decimal int return int(hashlib.sha1(a.view(a.dtype)).hexdigest(), 16)
[ "def", "np_hash", "(", "a", ")", ":", "if", "a", "is", "None", ":", "return", "hash", "(", "None", ")", "# Ensure that hashes are equal whatever the ordering in memory (C or", "# Fortran)", "a", "=", "np", ".", "ascontiguousarray", "(", "a", ")", "# Compute the digest and return a decimal int", "return", "int", "(", "hashlib", ".", "sha1", "(", "a", ".", "view", "(", "a", ".", "dtype", ")", ")", ".", "hexdigest", "(", ")", ",", "16", ")" ]
Return a hash of a NumPy array.
[ "Return", "a", "hash", "of", "a", "NumPy", "array", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/utils.py#L55-L63
train
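A small sketch of the layout-independence the comments in np_hash describe, assuming the function is imported from pyphi.utils: C- and Fortran-ordered copies of the same data hash identically.
import numpy as np
from pyphi.utils import np_hash

a = np.arange(6, dtype=np.float64).reshape(2, 3)   # C-ordered
b = np.asfortranarray(a)                           # same values, Fortran-ordered
assert np_hash(a) == np_hash(b)                    # memory layout does not change the hash
assert np_hash(None) == hash(None)                 # the None short-circuit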
wmayner/pyphi
pyphi/utils.py
powerset
def powerset(iterable, nonempty=False, reverse=False): """Generate the power set of an iterable. Args: iterable (Iterable): The iterable from which to generate the power set. Keyword Args: nonempty (boolean): If True, don't include the empty set. reverse (boolean): If True, reverse the order of the powerset. Returns: Iterable: An iterator over the power set. Example: >>> ps = powerset(np.arange(2)) >>> list(ps) [(), (0,), (1,), (0, 1)] >>> ps = powerset(np.arange(2), nonempty=True) >>> list(ps) [(0,), (1,), (0, 1)] >>> ps = powerset(np.arange(2), nonempty=True, reverse=True) >>> list(ps) [(1, 0), (1,), (0,)] """ iterable = list(iterable) if nonempty: # Don't include 0-length subsets start = 1 else: start = 0 seq_sizes = range(start, len(iterable) + 1) if reverse: seq_sizes = reversed(seq_sizes) iterable.reverse() return chain.from_iterable(combinations(iterable, r) for r in seq_sizes)
python
def powerset(iterable, nonempty=False, reverse=False): """Generate the power set of an iterable. Args: iterable (Iterable): The iterable from which to generate the power set. Keyword Args: nonempty (boolean): If True, don't include the empty set. reverse (boolean): If True, reverse the order of the powerset. Returns: Iterable: An iterator over the power set. Example: >>> ps = powerset(np.arange(2)) >>> list(ps) [(), (0,), (1,), (0, 1)] >>> ps = powerset(np.arange(2), nonempty=True) >>> list(ps) [(0,), (1,), (0, 1)] >>> ps = powerset(np.arange(2), nonempty=True, reverse=True) >>> list(ps) [(1, 0), (1,), (0,)] """ iterable = list(iterable) if nonempty: # Don't include 0-length subsets start = 1 else: start = 0 seq_sizes = range(start, len(iterable) + 1) if reverse: seq_sizes = reversed(seq_sizes) iterable.reverse() return chain.from_iterable(combinations(iterable, r) for r in seq_sizes)
[ "def", "powerset", "(", "iterable", ",", "nonempty", "=", "False", ",", "reverse", "=", "False", ")", ":", "iterable", "=", "list", "(", "iterable", ")", "if", "nonempty", ":", "# Don't include 0-length subsets", "start", "=", "1", "else", ":", "start", "=", "0", "seq_sizes", "=", "range", "(", "start", ",", "len", "(", "iterable", ")", "+", "1", ")", "if", "reverse", ":", "seq_sizes", "=", "reversed", "(", "seq_sizes", ")", "iterable", ".", "reverse", "(", ")", "return", "chain", ".", "from_iterable", "(", "combinations", "(", "iterable", ",", "r", ")", "for", "r", "in", "seq_sizes", ")" ]
Generate the power set of an iterable. Args: iterable (Iterable): The iterable from which to generate the power set. Keyword Args: nonempty (boolean): If True, don't include the empty set. reverse (boolean): If True, reverse the order of the powerset. Returns: Iterable: An iterator over the power set. Example: >>> ps = powerset(np.arange(2)) >>> list(ps) [(), (0,), (1,), (0, 1)] >>> ps = powerset(np.arange(2), nonempty=True) >>> list(ps) [(0,), (1,), (0, 1)] >>> ps = powerset(np.arange(2), nonempty=True, reverse=True) >>> list(ps) [(1, 0), (1,), (0,)]
[ "Generate", "the", "power", "set", "of", "an", "iterable", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/utils.py#L146-L183
train
wmayner/pyphi
pyphi/utils.py
load_data
def load_data(directory, num): """Load numpy data from the data directory. The files should stored in ``../data/<dir>`` and named ``0.npy, 1.npy, ... <num - 1>.npy``. Returns: list: A list of loaded data, such that ``list[i]`` contains the the contents of ``i.npy``. """ root = os.path.abspath(os.path.dirname(__file__)) def get_path(i): # pylint: disable=missing-docstring return os.path.join(root, 'data', directory, str(i) + '.npy') return [np.load(get_path(i)) for i in range(num)]
python
def load_data(directory, num): """Load numpy data from the data directory. The files should stored in ``../data/<dir>`` and named ``0.npy, 1.npy, ... <num - 1>.npy``. Returns: list: A list of loaded data, such that ``list[i]`` contains the the contents of ``i.npy``. """ root = os.path.abspath(os.path.dirname(__file__)) def get_path(i): # pylint: disable=missing-docstring return os.path.join(root, 'data', directory, str(i) + '.npy') return [np.load(get_path(i)) for i in range(num)]
[ "def", "load_data", "(", "directory", ",", "num", ")", ":", "root", "=", "os", ".", "path", ".", "abspath", "(", "os", ".", "path", ".", "dirname", "(", "__file__", ")", ")", "def", "get_path", "(", "i", ")", ":", "# pylint: disable=missing-docstring", "return", "os", ".", "path", ".", "join", "(", "root", ",", "'data'", ",", "directory", ",", "str", "(", "i", ")", "+", "'.npy'", ")", "return", "[", "np", ".", "load", "(", "get_path", "(", "i", ")", ")", "for", "i", "in", "range", "(", "num", ")", "]" ]
Load numpy data from the data directory. The files should be stored in ``../data/<dir>`` and named ``0.npy, 1.npy, ... <num - 1>.npy``. Returns: list: A list of loaded data, such that ``list[i]`` contains the contents of ``i.npy``.
[ "Load", "numpy", "data", "from", "the", "data", "directory", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/utils.py#L186-L201
train
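A hedged sketch of the on-disk layout the function above expects; 'example_dir' is a hypothetical directory name used only for illustration.
# Layout, relative to pyphi/utils.py:
#   pyphi/data/example_dir/0.npy
#   pyphi/data/example_dir/1.npy
#   pyphi/data/example_dir/2.npy
from pyphi.utils import load_data

arrays = load_data('example_dir', 3)   # [np.load('0.npy'), np.load('1.npy'), np.load('2.npy')]
print(len(arrays))                     # 3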
wmayner/pyphi
pyphi/utils.py
time_annotated
def time_annotated(func, *args, **kwargs): """Annotate the decorated function or method with the total execution time. The result is annotated with a `time` attribute. """ start = time() result = func(*args, **kwargs) end = time() result.time = round(end - start, config.PRECISION) return result
python
def time_annotated(func, *args, **kwargs): """Annotate the decorated function or method with the total execution time. The result is annotated with a `time` attribute. """ start = time() result = func(*args, **kwargs) end = time() result.time = round(end - start, config.PRECISION) return result
[ "def", "time_annotated", "(", "func", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "start", "=", "time", "(", ")", "result", "=", "func", "(", "*", "args", ",", "*", "*", "kwargs", ")", "end", "=", "time", "(", ")", "result", ".", "time", "=", "round", "(", "end", "-", "start", ",", "config", ".", "PRECISION", ")", "return", "result" ]
Annotate the decorated function or method with the total execution time. The result is annotated with a `time` attribute.
[ "Annotate", "the", "decorated", "function", "or", "method", "with", "the", "total", "execution", "time", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/utils.py#L207-L217
train
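A minimal illustration based solely on the signature shown in the record above (in the installed package the helper may be applied as a decorator instead); Result and slow_analysis are hypothetical stand-ins, and the wrapped callable must return an object that accepts attribute assignment.
from time import sleep
# assuming time_annotated (and pyphi.config) are in scope as in the record above

class Result:          # hypothetical container; pyphi passes model objects here
    pass

def slow_analysis():
    sleep(0.01)
    return Result()

annotated = time_annotated(slow_analysis)   # runs slow_analysis and stamps the result
print(annotated.time)                       # elapsed seconds, rounded to config.PRECISION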
wmayner/pyphi
pyphi/models/mechanism.py
_null_ria
def _null_ria(direction, mechanism, purview, repertoire=None, phi=0.0): """The irreducibility analysis for a reducible mechanism.""" # TODO Use properties here to infer mechanism and purview from # partition yet access them with .mechanism and .partition return RepertoireIrreducibilityAnalysis( direction=direction, mechanism=mechanism, purview=purview, partition=None, repertoire=repertoire, partitioned_repertoire=None, phi=phi )
python
def _null_ria(direction, mechanism, purview, repertoire=None, phi=0.0): """The irreducibility analysis for a reducible mechanism.""" # TODO Use properties here to infer mechanism and purview from # partition yet access them with .mechanism and .partition return RepertoireIrreducibilityAnalysis( direction=direction, mechanism=mechanism, purview=purview, partition=None, repertoire=repertoire, partitioned_repertoire=None, phi=phi )
[ "def", "_null_ria", "(", "direction", ",", "mechanism", ",", "purview", ",", "repertoire", "=", "None", ",", "phi", "=", "0.0", ")", ":", "# TODO Use properties here to infer mechanism and purview from", "# partition yet access them with .mechanism and .partition", "return", "RepertoireIrreducibilityAnalysis", "(", "direction", "=", "direction", ",", "mechanism", "=", "mechanism", ",", "purview", "=", "purview", ",", "partition", "=", "None", ",", "repertoire", "=", "repertoire", ",", "partitioned_repertoire", "=", "None", ",", "phi", "=", "phi", ")" ]
The irreducibility analysis for a reducible mechanism.
[ "The", "irreducibility", "analysis", "for", "a", "reducible", "mechanism", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/models/mechanism.py#L135-L147
train
wmayner/pyphi
pyphi/models/mechanism.py
MaximallyIrreducibleCauseOrEffect.damaged_by_cut
def damaged_by_cut(self, subsystem): """Return ``True`` if this MICE is affected by the subsystem's cut. The cut affects the MICE if it either splits the MICE's mechanism or splits the connections between the purview and mechanism. """ return (subsystem.cut.splits_mechanism(self.mechanism) or np.any(self._relevant_connections(subsystem) * subsystem.cut.cut_matrix(subsystem.network.size) == 1))
python
def damaged_by_cut(self, subsystem): """Return ``True`` if this MICE is affected by the subsystem's cut. The cut affects the MICE if it either splits the MICE's mechanism or splits the connections between the purview and mechanism. """ return (subsystem.cut.splits_mechanism(self.mechanism) or np.any(self._relevant_connections(subsystem) * subsystem.cut.cut_matrix(subsystem.network.size) == 1))
[ "def", "damaged_by_cut", "(", "self", ",", "subsystem", ")", ":", "return", "(", "subsystem", ".", "cut", ".", "splits_mechanism", "(", "self", ".", "mechanism", ")", "or", "np", ".", "any", "(", "self", ".", "_relevant_connections", "(", "subsystem", ")", "*", "subsystem", ".", "cut", ".", "cut_matrix", "(", "subsystem", ".", "network", ".", "size", ")", "==", "1", ")", ")" ]
Return ``True`` if this MICE is affected by the subsystem's cut. The cut affects the MICE if it either splits the MICE's mechanism or splits the connections between the purview and mechanism.
[ "Return", "True", "if", "this", "MICE", "is", "affected", "by", "the", "subsystem", "s", "cut", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/models/mechanism.py#L277-L285
train
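A small numpy sketch of the second condition above: the MICE is damaged when the elementwise product of its relevant-connection matrix and the cut matrix contains a 1. Both matrices here are hypothetical 2-node examples.
import numpy as np

relevant_connections = np.array([[0, 1],
                                 [0, 0]])   # mechanism node 0 feeds purview node 1
cut_matrix = np.array([[0, 1],
                       [0, 0]])             # the cut severs exactly that edge
print(np.any(relevant_connections * cut_matrix == 1))   # True -> damaged by the cut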
wmayner/pyphi
pyphi/models/mechanism.py
Concept.eq_repertoires
def eq_repertoires(self, other): """Return whether this concept has the same repertoires as another. .. warning:: This only checks if the cause and effect repertoires are equal as arrays; mechanisms, purviews, or even the nodes that the mechanism and purview indices refer to, might be different. """ return ( np.array_equal(self.cause_repertoire, other.cause_repertoire) and np.array_equal(self.effect_repertoire, other.effect_repertoire))
python
def eq_repertoires(self, other): """Return whether this concept has the same repertoires as another. .. warning:: This only checks if the cause and effect repertoires are equal as arrays; mechanisms, purviews, or even the nodes that the mechanism and purview indices refer to, might be different. """ return ( np.array_equal(self.cause_repertoire, other.cause_repertoire) and np.array_equal(self.effect_repertoire, other.effect_repertoire))
[ "def", "eq_repertoires", "(", "self", ",", "other", ")", ":", "return", "(", "np", ".", "array_equal", "(", "self", ".", "cause_repertoire", ",", "other", ".", "cause_repertoire", ")", "and", "np", ".", "array_equal", "(", "self", ".", "effect_repertoire", ",", "other", ".", "effect_repertoire", ")", ")" ]
Return whether this concept has the same repertoires as another. .. warning:: This only checks if the cause and effect repertoires are equal as arrays; mechanisms, purviews, or even the nodes that the mechanism and purview indices refer to, might be different.
[ "Return", "whether", "this", "concept", "has", "the", "same", "repertoires", "as", "another", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/models/mechanism.py#L431-L441
train
wmayner/pyphi
pyphi/models/mechanism.py
Concept.emd_eq
def emd_eq(self, other): """Return whether this concept is equal to another in the context of an EMD calculation. """ return (self.phi == other.phi and self.mechanism == other.mechanism and self.eq_repertoires(other))
python
def emd_eq(self, other): """Return whether this concept is equal to another in the context of an EMD calculation. """ return (self.phi == other.phi and self.mechanism == other.mechanism and self.eq_repertoires(other))
[ "def", "emd_eq", "(", "self", ",", "other", ")", ":", "return", "(", "self", ".", "phi", "==", "other", ".", "phi", "and", "self", ".", "mechanism", "==", "other", ".", "mechanism", "and", "self", ".", "eq_repertoires", "(", "other", ")", ")" ]
Return whether this concept is equal to another in the context of an EMD calculation.
[ "Return", "whether", "this", "concept", "is", "equal", "to", "another", "in", "the", "context", "of", "an", "EMD", "calculation", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/models/mechanism.py#L443-L449
train
wmayner/pyphi
pyphi/actual.py
directed_account
def directed_account(transition, direction, mechanisms=False, purviews=False, allow_neg=False): """Return the set of all |CausalLinks| of the specified direction.""" if mechanisms is False: mechanisms = utils.powerset(transition.mechanism_indices(direction), nonempty=True) links = [ transition.find_causal_link(direction, mechanism, purviews=purviews, allow_neg=allow_neg) for mechanism in mechanisms] # Filter out causal links with zero alpha return DirectedAccount(filter(None, links))
python
def directed_account(transition, direction, mechanisms=False, purviews=False, allow_neg=False): """Return the set of all |CausalLinks| of the specified direction.""" if mechanisms is False: mechanisms = utils.powerset(transition.mechanism_indices(direction), nonempty=True) links = [ transition.find_causal_link(direction, mechanism, purviews=purviews, allow_neg=allow_neg) for mechanism in mechanisms] # Filter out causal links with zero alpha return DirectedAccount(filter(None, links))
[ "def", "directed_account", "(", "transition", ",", "direction", ",", "mechanisms", "=", "False", ",", "purviews", "=", "False", ",", "allow_neg", "=", "False", ")", ":", "if", "mechanisms", "is", "False", ":", "mechanisms", "=", "utils", ".", "powerset", "(", "transition", ".", "mechanism_indices", "(", "direction", ")", ",", "nonempty", "=", "True", ")", "links", "=", "[", "transition", ".", "find_causal_link", "(", "direction", ",", "mechanism", ",", "purviews", "=", "purviews", ",", "allow_neg", "=", "allow_neg", ")", "for", "mechanism", "in", "mechanisms", "]", "# Filter out causal links with zero alpha", "return", "DirectedAccount", "(", "filter", "(", "None", ",", "links", ")", ")" ]
Return the set of all |CausalLinks| of the specified direction.
[ "Return", "the", "set", "of", "all", "|CausalLinks|", "of", "the", "specified", "direction", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/actual.py#L440-L452
train
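A hedged end-to-end sketch connecting this record to the prevention example earlier in this section; it assumes Direction is importable from the top-level pyphi package and that CausalLinks expose mechanism, purview and alpha.
import pyphi
from pyphi import Direction

transition = pyphi.examples.prevention()
links = pyphi.actual.directed_account(transition, Direction.CAUSE)
for link in links:                          # links with zero alpha are already filtered out
    print(link.mechanism, link.purview, link.alpha)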
wmayner/pyphi
pyphi/actual.py
account
def account(transition, direction=Direction.BIDIRECTIONAL): """Return the set of all causal links for a |Transition|. Args: transition (Transition): The transition of interest. Keyword Args: direction (Direction): By default the account contains actual causes and actual effects. """ if direction != Direction.BIDIRECTIONAL: return directed_account(transition, direction) return Account(directed_account(transition, Direction.CAUSE) + directed_account(transition, Direction.EFFECT))
python
def account(transition, direction=Direction.BIDIRECTIONAL): """Return the set of all causal links for a |Transition|. Args: transition (Transition): The transition of interest. Keyword Args: direction (Direction): By default the account contains actual causes and actual effects. """ if direction != Direction.BIDIRECTIONAL: return directed_account(transition, direction) return Account(directed_account(transition, Direction.CAUSE) + directed_account(transition, Direction.EFFECT))
[ "def", "account", "(", "transition", ",", "direction", "=", "Direction", ".", "BIDIRECTIONAL", ")", ":", "if", "direction", "!=", "Direction", ".", "BIDIRECTIONAL", ":", "return", "directed_account", "(", "transition", ",", "direction", ")", "return", "Account", "(", "directed_account", "(", "transition", ",", "Direction", ".", "CAUSE", ")", "+", "directed_account", "(", "transition", ",", "Direction", ".", "EFFECT", ")", ")" ]
Return the set of all causal links for a |Transition|. Args: transition (Transition): The transition of interest. Keyword Args: direction (Direction): By default the account contains actual causes and actual effects.
[ "Return", "the", "set", "of", "all", "causal", "links", "for", "a", "|Transition|", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/actual.py#L455-L469
train
wmayner/pyphi
pyphi/actual.py
_evaluate_cut
def _evaluate_cut(transition, cut, unpartitioned_account, direction=Direction.BIDIRECTIONAL): """Find the |AcSystemIrreducibilityAnalysis| for a given cut.""" cut_transition = transition.apply_cut(cut) partitioned_account = account(cut_transition, direction) log.debug("Finished evaluating %s.", cut) alpha = account_distance(unpartitioned_account, partitioned_account) return AcSystemIrreducibilityAnalysis( alpha=round(alpha, config.PRECISION), direction=direction, account=unpartitioned_account, partitioned_account=partitioned_account, transition=transition, cut=cut)
python
def _evaluate_cut(transition, cut, unpartitioned_account, direction=Direction.BIDIRECTIONAL): """Find the |AcSystemIrreducibilityAnalysis| for a given cut.""" cut_transition = transition.apply_cut(cut) partitioned_account = account(cut_transition, direction) log.debug("Finished evaluating %s.", cut) alpha = account_distance(unpartitioned_account, partitioned_account) return AcSystemIrreducibilityAnalysis( alpha=round(alpha, config.PRECISION), direction=direction, account=unpartitioned_account, partitioned_account=partitioned_account, transition=transition, cut=cut)
[ "def", "_evaluate_cut", "(", "transition", ",", "cut", ",", "unpartitioned_account", ",", "direction", "=", "Direction", ".", "BIDIRECTIONAL", ")", ":", "cut_transition", "=", "transition", ".", "apply_cut", "(", "cut", ")", "partitioned_account", "=", "account", "(", "cut_transition", ",", "direction", ")", "log", ".", "debug", "(", "\"Finished evaluating %s.\"", ",", "cut", ")", "alpha", "=", "account_distance", "(", "unpartitioned_account", ",", "partitioned_account", ")", "return", "AcSystemIrreducibilityAnalysis", "(", "alpha", "=", "round", "(", "alpha", ",", "config", ".", "PRECISION", ")", ",", "direction", "=", "direction", ",", "account", "=", "unpartitioned_account", ",", "partitioned_account", "=", "partitioned_account", ",", "transition", "=", "transition", ",", "cut", "=", "cut", ")" ]
Find the |AcSystemIrreducibilityAnalysis| for a given cut.
[ "Find", "the", "|AcSystemIrreducibilityAnalysis|", "for", "a", "given", "cut", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/actual.py#L492-L507
train
wmayner/pyphi
pyphi/actual.py
_get_cuts
def _get_cuts(transition, direction): """A list of possible cuts to a transition.""" n = transition.network.size if direction is Direction.BIDIRECTIONAL: yielded = set() for cut in chain(_get_cuts(transition, Direction.CAUSE), _get_cuts(transition, Direction.EFFECT)): cm = utils.np_hashable(cut.cut_matrix(n)) if cm not in yielded: yielded.add(cm) yield cut else: mechanism = transition.mechanism_indices(direction) purview = transition.purview_indices(direction) for partition in mip_partitions(mechanism, purview, transition.node_labels): yield ActualCut(direction, partition, transition.node_labels)
python
def _get_cuts(transition, direction): """A list of possible cuts to a transition.""" n = transition.network.size if direction is Direction.BIDIRECTIONAL: yielded = set() for cut in chain(_get_cuts(transition, Direction.CAUSE), _get_cuts(transition, Direction.EFFECT)): cm = utils.np_hashable(cut.cut_matrix(n)) if cm not in yielded: yielded.add(cm) yield cut else: mechanism = transition.mechanism_indices(direction) purview = transition.purview_indices(direction) for partition in mip_partitions(mechanism, purview, transition.node_labels): yield ActualCut(direction, partition, transition.node_labels)
[ "def", "_get_cuts", "(", "transition", ",", "direction", ")", ":", "n", "=", "transition", ".", "network", ".", "size", "if", "direction", "is", "Direction", ".", "BIDIRECTIONAL", ":", "yielded", "=", "set", "(", ")", "for", "cut", "in", "chain", "(", "_get_cuts", "(", "transition", ",", "Direction", ".", "CAUSE", ")", ",", "_get_cuts", "(", "transition", ",", "Direction", ".", "EFFECT", ")", ")", ":", "cm", "=", "utils", ".", "np_hashable", "(", "cut", ".", "cut_matrix", "(", "n", ")", ")", "if", "cm", "not", "in", "yielded", ":", "yielded", ".", "add", "(", "cm", ")", "yield", "cut", "else", ":", "mechanism", "=", "transition", ".", "mechanism_indices", "(", "direction", ")", "purview", "=", "transition", ".", "purview_indices", "(", "direction", ")", "for", "partition", "in", "mip_partitions", "(", "mechanism", ",", "purview", ",", "transition", ".", "node_labels", ")", ":", "yield", "ActualCut", "(", "direction", ",", "partition", ",", "transition", ".", "node_labels", ")" ]
A list of possible cuts to a transition.
[ "A", "list", "of", "possible", "cuts", "to", "a", "transition", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/actual.py#L511-L529
train
wmayner/pyphi
pyphi/actual.py
sia
def sia(transition, direction=Direction.BIDIRECTIONAL): """Return the minimal information partition of a transition in a specific direction. Args: transition (Transition): The candidate system. Returns: AcSystemIrreducibilityAnalysis: A nested structure containing all the data from the intermediate calculations. The top level contains the basic irreducibility information for the given subsystem. """ validate.direction(direction, allow_bi=True) log.info("Calculating big-alpha for %s...", transition) if not transition: log.info('Transition %s is empty; returning null SIA ' 'immediately.', transition) return _null_ac_sia(transition, direction) if not connectivity.is_weak(transition.network.cm, transition.node_indices): log.info('%s is not strongly/weakly connected; returning null SIA ' 'immediately.', transition) return _null_ac_sia(transition, direction) log.debug("Finding unpartitioned account...") unpartitioned_account = account(transition, direction) log.debug("Found unpartitioned account.") if not unpartitioned_account: log.info('Empty unpartitioned account; returning null AC SIA ' 'immediately.') return _null_ac_sia(transition, direction) cuts = _get_cuts(transition, direction) engine = ComputeACSystemIrreducibility( cuts, transition, direction, unpartitioned_account) result = engine.run_sequential() log.info("Finished calculating big-ac-phi data for %s.", transition) log.debug("RESULT: \n%s", result) return result
python
def sia(transition, direction=Direction.BIDIRECTIONAL): """Return the minimal information partition of a transition in a specific direction. Args: transition (Transition): The candidate system. Returns: AcSystemIrreducibilityAnalysis: A nested structure containing all the data from the intermediate calculations. The top level contains the basic irreducibility information for the given subsystem. """ validate.direction(direction, allow_bi=True) log.info("Calculating big-alpha for %s...", transition) if not transition: log.info('Transition %s is empty; returning null SIA ' 'immediately.', transition) return _null_ac_sia(transition, direction) if not connectivity.is_weak(transition.network.cm, transition.node_indices): log.info('%s is not strongly/weakly connected; returning null SIA ' 'immediately.', transition) return _null_ac_sia(transition, direction) log.debug("Finding unpartitioned account...") unpartitioned_account = account(transition, direction) log.debug("Found unpartitioned account.") if not unpartitioned_account: log.info('Empty unpartitioned account; returning null AC SIA ' 'immediately.') return _null_ac_sia(transition, direction) cuts = _get_cuts(transition, direction) engine = ComputeACSystemIrreducibility( cuts, transition, direction, unpartitioned_account) result = engine.run_sequential() log.info("Finished calculating big-ac-phi data for %s.", transition) log.debug("RESULT: \n%s", result) return result
[ "def", "sia", "(", "transition", ",", "direction", "=", "Direction", ".", "BIDIRECTIONAL", ")", ":", "validate", ".", "direction", "(", "direction", ",", "allow_bi", "=", "True", ")", "log", ".", "info", "(", "\"Calculating big-alpha for %s...\"", ",", "transition", ")", "if", "not", "transition", ":", "log", ".", "info", "(", "'Transition %s is empty; returning null SIA '", "'immediately.'", ",", "transition", ")", "return", "_null_ac_sia", "(", "transition", ",", "direction", ")", "if", "not", "connectivity", ".", "is_weak", "(", "transition", ".", "network", ".", "cm", ",", "transition", ".", "node_indices", ")", ":", "log", ".", "info", "(", "'%s is not strongly/weakly connected; returning null SIA '", "'immediately.'", ",", "transition", ")", "return", "_null_ac_sia", "(", "transition", ",", "direction", ")", "log", ".", "debug", "(", "\"Finding unpartitioned account...\"", ")", "unpartitioned_account", "=", "account", "(", "transition", ",", "direction", ")", "log", ".", "debug", "(", "\"Found unpartitioned account.\"", ")", "if", "not", "unpartitioned_account", ":", "log", ".", "info", "(", "'Empty unpartitioned account; returning null AC SIA '", "'immediately.'", ")", "return", "_null_ac_sia", "(", "transition", ",", "direction", ")", "cuts", "=", "_get_cuts", "(", "transition", ",", "direction", ")", "engine", "=", "ComputeACSystemIrreducibility", "(", "cuts", ",", "transition", ",", "direction", ",", "unpartitioned_account", ")", "result", "=", "engine", ".", "run_sequential", "(", ")", "log", ".", "info", "(", "\"Finished calculating big-ac-phi data for %s.\"", ",", "transition", ")", "log", ".", "debug", "(", "\"RESULT: \\n%s\"", ",", "result", ")", "return", "result" ]
Return the minimal information partition of a transition in a specific direction. Args: transition (Transition): The candidate system. Returns: AcSystemIrreducibilityAnalysis: A nested structure containing all the data from the intermediate calculations. The top level contains the basic irreducibility information for the given subsystem.
[ "Return", "the", "minimal", "information", "partition", "of", "a", "transition", "in", "a", "specific", "direction", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/actual.py#L532-L573
train
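A hedged usage sketch of the entry point above on the prevention transition; alpha on the returned analysis is the transition's irreducibility under the minimal cut.
import pyphi

transition = pyphi.examples.prevention()
analysis = pyphi.actual.sia(transition)     # Direction.BIDIRECTIONAL by default
print(analysis.alpha)
print(analysis.cut)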
wmayner/pyphi
pyphi/actual.py
nexus
def nexus(network, before_state, after_state, direction=Direction.BIDIRECTIONAL): """Return a tuple of all irreducible nexus of the network.""" validate.is_network(network) sias = (sia(transition, direction) for transition in transitions(network, before_state, after_state)) return tuple(sorted(filter(None, sias), reverse=True))
python
def nexus(network, before_state, after_state, direction=Direction.BIDIRECTIONAL): """Return a tuple of all irreducible nexus of the network.""" validate.is_network(network) sias = (sia(transition, direction) for transition in transitions(network, before_state, after_state)) return tuple(sorted(filter(None, sias), reverse=True))
[ "def", "nexus", "(", "network", ",", "before_state", ",", "after_state", ",", "direction", "=", "Direction", ".", "BIDIRECTIONAL", ")", ":", "validate", ".", "is_network", "(", "network", ")", "sias", "=", "(", "sia", "(", "transition", ",", "direction", ")", "for", "transition", "in", "transitions", "(", "network", ",", "before_state", ",", "after_state", ")", ")", "return", "tuple", "(", "sorted", "(", "filter", "(", "None", ",", "sias", ")", ",", "reverse", "=", "True", ")", ")" ]
Return a tuple of all irreducible nexus of the network.
[ "Return", "a", "tuple", "of", "all", "irreducible", "nexus", "of", "the", "network", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/actual.py#L626-L633
train
wmayner/pyphi
pyphi/actual.py
causal_nexus
def causal_nexus(network, before_state, after_state, direction=Direction.BIDIRECTIONAL): """Return the causal nexus of the network.""" validate.is_network(network) log.info("Calculating causal nexus...") result = nexus(network, before_state, after_state, direction) if result: result = max(result) else: null_transition = Transition( network, before_state, after_state, (), ()) result = _null_ac_sia(null_transition, direction) log.info("Finished calculating causal nexus.") log.debug("RESULT: \n%s", result) return result
python
def causal_nexus(network, before_state, after_state, direction=Direction.BIDIRECTIONAL): """Return the causal nexus of the network.""" validate.is_network(network) log.info("Calculating causal nexus...") result = nexus(network, before_state, after_state, direction) if result: result = max(result) else: null_transition = Transition( network, before_state, after_state, (), ()) result = _null_ac_sia(null_transition, direction) log.info("Finished calculating causal nexus.") log.debug("RESULT: \n%s", result) return result
[ "def", "causal_nexus", "(", "network", ",", "before_state", ",", "after_state", ",", "direction", "=", "Direction", ".", "BIDIRECTIONAL", ")", ":", "validate", ".", "is_network", "(", "network", ")", "log", ".", "info", "(", "\"Calculating causal nexus...\"", ")", "result", "=", "nexus", "(", "network", ",", "before_state", ",", "after_state", ",", "direction", ")", "if", "result", ":", "result", "=", "max", "(", "result", ")", "else", ":", "null_transition", "=", "Transition", "(", "network", ",", "before_state", ",", "after_state", ",", "(", ")", ",", "(", ")", ")", "result", "=", "_null_ac_sia", "(", "null_transition", ",", "direction", ")", "log", ".", "info", "(", "\"Finished calculating causal nexus.\"", ")", "log", ".", "debug", "(", "\"RESULT: \\n%s\"", ",", "result", ")", "return", "result" ]
Return the causal nexus of the network.
[ "Return", "the", "causal", "nexus", "of", "the", "network", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/actual.py#L636-L652
train
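A hedged sketch using the actual_causation example network described at the top of this section: search all transitions between the two states for the maximally irreducible one.
import pyphi

network = pyphi.examples.actual_causation()   # OR and AND gates with self-loops
before_state, after_state = (1, 1), (1, 1)    # both gates ON at t-1 and at t
result = pyphi.actual.causal_nexus(network, before_state, after_state)
print(result.alpha)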
wmayner/pyphi
pyphi/actual.py
nice_true_ces
def nice_true_ces(tc): """Format a true |CauseEffectStructure|.""" cause_list = [] next_list = [] cause = '<--' effect = '-->' for event in tc: if event.direction == Direction.CAUSE: cause_list.append(["{0:.4f}".format(round(event.alpha, 4)), event.mechanism, cause, event.purview]) elif event.direction == Direction.EFFECT: next_list.append(["{0:.4f}".format(round(event.alpha, 4)), event.mechanism, effect, event.purview]) else: validate.direction(event.direction) true_list = [(cause_list[event], next_list[event]) for event in range(len(cause_list))] return true_list
python
def nice_true_ces(tc): """Format a true |CauseEffectStructure|.""" cause_list = [] next_list = [] cause = '<--' effect = '-->' for event in tc: if event.direction == Direction.CAUSE: cause_list.append(["{0:.4f}".format(round(event.alpha, 4)), event.mechanism, cause, event.purview]) elif event.direction == Direction.EFFECT: next_list.append(["{0:.4f}".format(round(event.alpha, 4)), event.mechanism, effect, event.purview]) else: validate.direction(event.direction) true_list = [(cause_list[event], next_list[event]) for event in range(len(cause_list))] return true_list
[ "def", "nice_true_ces", "(", "tc", ")", ":", "cause_list", "=", "[", "]", "next_list", "=", "[", "]", "cause", "=", "'<--'", "effect", "=", "'-->'", "for", "event", "in", "tc", ":", "if", "event", ".", "direction", "==", "Direction", ".", "CAUSE", ":", "cause_list", ".", "append", "(", "[", "\"{0:.4f}\"", ".", "format", "(", "round", "(", "event", ".", "alpha", ",", "4", ")", ")", ",", "event", ".", "mechanism", ",", "cause", ",", "event", ".", "purview", "]", ")", "elif", "event", ".", "direction", "==", "Direction", ".", "EFFECT", ":", "next_list", ".", "append", "(", "[", "\"{0:.4f}\"", ".", "format", "(", "round", "(", "event", ".", "alpha", ",", "4", ")", ")", ",", "event", ".", "mechanism", ",", "effect", ",", "event", ".", "purview", "]", ")", "else", ":", "validate", ".", "direction", "(", "event", ".", "direction", ")", "true_list", "=", "[", "(", "cause_list", "[", "event", "]", ",", "next_list", "[", "event", "]", ")", "for", "event", "in", "range", "(", "len", "(", "cause_list", ")", ")", "]", "return", "true_list" ]
Format a true |CauseEffectStructure|.
[ "Format", "a", "true", "|CauseEffectStructure|", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/actual.py#L660-L678
train
wmayner/pyphi
pyphi/actual.py
true_ces
def true_ces(subsystem, previous_state, next_state): """Set of all sets of elements that have true causes and true effects. .. note:: Since the true |CauseEffectStructure| is always about the full system, the background conditions don't matter and the subsystem should be conditioned on the current state. """ network = subsystem.network nodes = subsystem.node_indices state = subsystem.state _events = events(network, previous_state, state, next_state, nodes) if not _events: log.info("Finished calculating, no echo events.") return None result = tuple([event.actual_cause for event in _events] + [event.actual_effect for event in _events]) log.info("Finished calculating true events.") log.debug("RESULT: \n%s", result) return result
python
def true_ces(subsystem, previous_state, next_state): """Set of all sets of elements that have true causes and true effects. .. note:: Since the true |CauseEffectStructure| is always about the full system, the background conditions don't matter and the subsystem should be conditioned on the current state. """ network = subsystem.network nodes = subsystem.node_indices state = subsystem.state _events = events(network, previous_state, state, next_state, nodes) if not _events: log.info("Finished calculating, no echo events.") return None result = tuple([event.actual_cause for event in _events] + [event.actual_effect for event in _events]) log.info("Finished calculating true events.") log.debug("RESULT: \n%s", result) return result
[ "def", "true_ces", "(", "subsystem", ",", "previous_state", ",", "next_state", ")", ":", "network", "=", "subsystem", ".", "network", "nodes", "=", "subsystem", ".", "node_indices", "state", "=", "subsystem", ".", "state", "_events", "=", "events", "(", "network", ",", "previous_state", ",", "state", ",", "next_state", ",", "nodes", ")", "if", "not", "_events", ":", "log", ".", "info", "(", "\"Finished calculating, no echo events.\"", ")", "return", "None", "result", "=", "tuple", "(", "[", "event", ".", "actual_cause", "for", "event", "in", "_events", "]", "+", "[", "event", ".", "actual_effect", "for", "event", "in", "_events", "]", ")", "log", ".", "info", "(", "\"Finished calculating true events.\"", ")", "log", ".", "debug", "(", "\"RESULT: \\n%s\"", ",", "result", ")", "return", "result" ]
Set of all sets of elements that have true causes and true effects. .. note:: Since the true |CauseEffectStructure| is always about the full system, the background conditions don't matter and the subsystem should be conditioned on the current state.
[ "Set", "of", "all", "sets", "of", "elements", "that", "have", "true", "causes", "and", "true", "effects", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/actual.py#L728-L751
train
wmayner/pyphi
pyphi/actual.py
true_events
def true_events(network, previous_state, current_state, next_state, indices=None, major_complex=None): """Return all mechanisms that have true causes and true effects within the complex. Args: network (Network): The network to analyze. previous_state (tuple[int]): The state of the network at ``t - 1``. current_state (tuple[int]): The state of the network at ``t``. next_state (tuple[int]): The state of the network at ``t + 1``. Keyword Args: indices (tuple[int]): The indices of the major complex. major_complex (AcSystemIrreducibilityAnalysis): The major complex. If ``major_complex`` is given then ``indices`` is ignored. Returns: tuple[Event]: List of true events in the major complex. """ # TODO: validate triplet of states if major_complex: nodes = major_complex.subsystem.node_indices elif indices: nodes = indices else: major_complex = compute.major_complex(network, current_state) nodes = major_complex.subsystem.node_indices return events(network, previous_state, current_state, next_state, nodes)
python
def true_events(network, previous_state, current_state, next_state, indices=None, major_complex=None): """Return all mechanisms that have true causes and true effects within the complex. Args: network (Network): The network to analyze. previous_state (tuple[int]): The state of the network at ``t - 1``. current_state (tuple[int]): The state of the network at ``t``. next_state (tuple[int]): The state of the network at ``t + 1``. Keyword Args: indices (tuple[int]): The indices of the major complex. major_complex (AcSystemIrreducibilityAnalysis): The major complex. If ``major_complex`` is given then ``indices`` is ignored. Returns: tuple[Event]: List of true events in the major complex. """ # TODO: validate triplet of states if major_complex: nodes = major_complex.subsystem.node_indices elif indices: nodes = indices else: major_complex = compute.major_complex(network, current_state) nodes = major_complex.subsystem.node_indices return events(network, previous_state, current_state, next_state, nodes)
[ "def", "true_events", "(", "network", ",", "previous_state", ",", "current_state", ",", "next_state", ",", "indices", "=", "None", ",", "major_complex", "=", "None", ")", ":", "# TODO: validate triplet of states", "if", "major_complex", ":", "nodes", "=", "major_complex", ".", "subsystem", ".", "node_indices", "elif", "indices", ":", "nodes", "=", "indices", "else", ":", "major_complex", "=", "compute", ".", "major_complex", "(", "network", ",", "current_state", ")", "nodes", "=", "major_complex", ".", "subsystem", ".", "node_indices", "return", "events", "(", "network", ",", "previous_state", ",", "current_state", ",", "next_state", ",", "nodes", ")" ]
Return all mechanisms that have true causes and true effects within the complex. Args: network (Network): The network to analyze. previous_state (tuple[int]): The state of the network at ``t - 1``. current_state (tuple[int]): The state of the network at ``t``. next_state (tuple[int]): The state of the network at ``t + 1``. Keyword Args: indices (tuple[int]): The indices of the major complex. major_complex (AcSystemIrreducibilityAnalysis): The major complex. If ``major_complex`` is given then ``indices`` is ignored. Returns: tuple[Event]: List of true events in the major complex.
[ "Return", "all", "mechanisms", "that", "have", "true", "causes", "and", "true", "effects", "within", "the", "complex", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/actual.py#L754-L783
train
wmayner/pyphi
pyphi/actual.py
extrinsic_events
def extrinsic_events(network, previous_state, current_state, next_state, indices=None, major_complex=None): """Set of all mechanisms that are in the major complex but which have true causes and effects within the entire network. Args: network (Network): The network to analyze. previous_state (tuple[int]): The state of the network at ``t - 1``. current_state (tuple[int]): The state of the network at ``t``. next_state (tuple[int]): The state of the network at ``t + 1``. Keyword Args: indices (tuple[int]): The indices of the major complex. major_complex (AcSystemIrreducibilityAnalysis): The major complex. If ``major_complex`` is given then ``indices`` is ignored. Returns: tuple(actions): List of extrinsic events in the major complex. """ if major_complex: mc_nodes = major_complex.subsystem.node_indices elif indices: mc_nodes = indices else: major_complex = compute.major_complex(network, current_state) mc_nodes = major_complex.subsystem.node_indices mechanisms = list(utils.powerset(mc_nodes, nonempty=True)) all_nodes = network.node_indices return events(network, previous_state, current_state, next_state, all_nodes, mechanisms=mechanisms)
python
def extrinsic_events(network, previous_state, current_state, next_state, indices=None, major_complex=None): """Set of all mechanisms that are in the major complex but which have true causes and effects within the entire network. Args: network (Network): The network to analyze. previous_state (tuple[int]): The state of the network at ``t - 1``. current_state (tuple[int]): The state of the network at ``t``. next_state (tuple[int]): The state of the network at ``t + 1``. Keyword Args: indices (tuple[int]): The indices of the major complex. major_complex (AcSystemIrreducibilityAnalysis): The major complex. If ``major_complex`` is given then ``indices`` is ignored. Returns: tuple(actions): List of extrinsic events in the major complex. """ if major_complex: mc_nodes = major_complex.subsystem.node_indices elif indices: mc_nodes = indices else: major_complex = compute.major_complex(network, current_state) mc_nodes = major_complex.subsystem.node_indices mechanisms = list(utils.powerset(mc_nodes, nonempty=True)) all_nodes = network.node_indices return events(network, previous_state, current_state, next_state, all_nodes, mechanisms=mechanisms)
[ "def", "extrinsic_events", "(", "network", ",", "previous_state", ",", "current_state", ",", "next_state", ",", "indices", "=", "None", ",", "major_complex", "=", "None", ")", ":", "if", "major_complex", ":", "mc_nodes", "=", "major_complex", ".", "subsystem", ".", "node_indices", "elif", "indices", ":", "mc_nodes", "=", "indices", "else", ":", "major_complex", "=", "compute", ".", "major_complex", "(", "network", ",", "current_state", ")", "mc_nodes", "=", "major_complex", ".", "subsystem", ".", "node_indices", "mechanisms", "=", "list", "(", "utils", ".", "powerset", "(", "mc_nodes", ",", "nonempty", "=", "True", ")", ")", "all_nodes", "=", "network", ".", "node_indices", "return", "events", "(", "network", ",", "previous_state", ",", "current_state", ",", "next_state", ",", "all_nodes", ",", "mechanisms", "=", "mechanisms", ")" ]
Set of all mechanisms that are in the major complex but which have true causes and effects within the entire network. Args: network (Network): The network to analyze. previous_state (tuple[int]): The state of the network at ``t - 1``. current_state (tuple[int]): The state of the network at ``t``. next_state (tuple[int]): The state of the network at ``t + 1``. Keyword Args: indices (tuple[int]): The indices of the major complex. major_complex (AcSystemIrreducibilityAnalysis): The major complex. If ``major_complex`` is given then ``indices`` is ignored. Returns: tuple(actions): List of extrinsic events in the major complex.
[ "Set", "of", "all", "mechanisms", "that", "are", "in", "the", "major", "complex", "but", "which", "have", "true", "causes", "and", "effects", "within", "the", "entire", "network", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/actual.py#L786-L817
train
wmayner/pyphi
pyphi/actual.py
Transition.apply_cut
def apply_cut(self, cut): """Return a cut version of this transition.""" return Transition(self.network, self.before_state, self.after_state, self.cause_indices, self.effect_indices, cut)
python
def apply_cut(self, cut): """Return a cut version of this transition.""" return Transition(self.network, self.before_state, self.after_state, self.cause_indices, self.effect_indices, cut)
[ "def", "apply_cut", "(", "self", ",", "cut", ")", ":", "return", "Transition", "(", "self", ".", "network", ",", "self", ".", "before_state", ",", "self", ".", "after_state", ",", "self", ".", "cause_indices", ",", "self", ".", "effect_indices", ",", "cut", ")" ]
Return a cut version of this transition.
[ "Return", "a", "cut", "version", "of", "this", "transition", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/actual.py#L176-L179
train
wmayner/pyphi
pyphi/actual.py
Transition.cause_repertoire
def cause_repertoire(self, mechanism, purview): """Return the cause repertoire.""" return self.repertoire(Direction.CAUSE, mechanism, purview)
python
def cause_repertoire(self, mechanism, purview): """Return the cause repertoire.""" return self.repertoire(Direction.CAUSE, mechanism, purview)
[ "def", "cause_repertoire", "(", "self", ",", "mechanism", ",", "purview", ")", ":", "return", "self", ".", "repertoire", "(", "Direction", ".", "CAUSE", ",", "mechanism", ",", "purview", ")" ]
Return the cause repertoire.
[ "Return", "the", "cause", "repertoire", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/actual.py#L181-L183
train
wmayner/pyphi
pyphi/actual.py
Transition.effect_repertoire
def effect_repertoire(self, mechanism, purview): """Return the effect repertoire.""" return self.repertoire(Direction.EFFECT, mechanism, purview)
python
def effect_repertoire(self, mechanism, purview): """Return the effect repertoire.""" return self.repertoire(Direction.EFFECT, mechanism, purview)
[ "def", "effect_repertoire", "(", "self", ",", "mechanism", ",", "purview", ")", ":", "return", "self", ".", "repertoire", "(", "Direction", ".", "EFFECT", ",", "mechanism", ",", "purview", ")" ]
Return the effect repertoire.
[ "Return", "the", "effect", "repertoire", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/actual.py#L185-L187
train
wmayner/pyphi
pyphi/actual.py
Transition.repertoire
def repertoire(self, direction, mechanism, purview): """Return the cause or effect repertoire function based on a direction. Args: direction (str): The temporal direction, specifiying the cause or effect repertoire. """ system = self.system[direction] node_labels = system.node_labels if not set(purview).issubset(self.purview_indices(direction)): raise ValueError('{} is not a {} purview in {}'.format( fmt.fmt_mechanism(purview, node_labels), direction, self)) if not set(mechanism).issubset(self.mechanism_indices(direction)): raise ValueError('{} is no a {} mechanism in {}'.format( fmt.fmt_mechanism(mechanism, node_labels), direction, self)) return system.repertoire(direction, mechanism, purview)
python
def repertoire(self, direction, mechanism, purview): """Return the cause or effect repertoire function based on a direction. Args: direction (str): The temporal direction, specifiying the cause or effect repertoire. """ system = self.system[direction] node_labels = system.node_labels if not set(purview).issubset(self.purview_indices(direction)): raise ValueError('{} is not a {} purview in {}'.format( fmt.fmt_mechanism(purview, node_labels), direction, self)) if not set(mechanism).issubset(self.mechanism_indices(direction)): raise ValueError('{} is no a {} mechanism in {}'.format( fmt.fmt_mechanism(mechanism, node_labels), direction, self)) return system.repertoire(direction, mechanism, purview)
[ "def", "repertoire", "(", "self", ",", "direction", ",", "mechanism", ",", "purview", ")", ":", "system", "=", "self", ".", "system", "[", "direction", "]", "node_labels", "=", "system", ".", "node_labels", "if", "not", "set", "(", "purview", ")", ".", "issubset", "(", "self", ".", "purview_indices", "(", "direction", ")", ")", ":", "raise", "ValueError", "(", "'{} is not a {} purview in {}'", ".", "format", "(", "fmt", ".", "fmt_mechanism", "(", "purview", ",", "node_labels", ")", ",", "direction", ",", "self", ")", ")", "if", "not", "set", "(", "mechanism", ")", ".", "issubset", "(", "self", ".", "mechanism_indices", "(", "direction", ")", ")", ":", "raise", "ValueError", "(", "'{} is no a {} mechanism in {}'", ".", "format", "(", "fmt", ".", "fmt_mechanism", "(", "mechanism", ",", "node_labels", ")", ",", "direction", ",", "self", ")", ")", "return", "system", ".", "repertoire", "(", "direction", ",", "mechanism", ",", "purview", ")" ]
Return the cause or effect repertoire function based on a direction. Args: direction (str): The temporal direction, specifying the cause or effect repertoire.
[ "Return", "the", "cause", "or", "effect", "repertoire", "function", "based", "on", "a", "direction", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/actual.py#L197-L215
train
wmayner/pyphi
pyphi/actual.py
Transition.state_probability
def state_probability(self, direction, repertoire, purview,): """Compute the probability of the purview in its current state given the repertoire. Collapses the dimensions of the repertoire that correspond to the purview nodes onto their state. All other dimension are already singular and thus receive 0 as the conditioning index. Returns: float: A single probabilty. """ purview_state = self.purview_state(direction) index = tuple(node_state if node in purview else 0 for node, node_state in enumerate(purview_state)) return repertoire[index]
python
def state_probability(self, direction, repertoire, purview,): """Compute the probability of the purview in its current state given the repertoire. Collapses the dimensions of the repertoire that correspond to the purview nodes onto their state. All other dimension are already singular and thus receive 0 as the conditioning index. Returns: float: A single probabilty. """ purview_state = self.purview_state(direction) index = tuple(node_state if node in purview else 0 for node, node_state in enumerate(purview_state)) return repertoire[index]
[ "def", "state_probability", "(", "self", ",", "direction", ",", "repertoire", ",", "purview", ",", ")", ":", "purview_state", "=", "self", ".", "purview_state", "(", "direction", ")", "index", "=", "tuple", "(", "node_state", "if", "node", "in", "purview", "else", "0", "for", "node", ",", "node_state", "in", "enumerate", "(", "purview_state", ")", ")", "return", "repertoire", "[", "index", "]" ]
Compute the probability of the purview in its current state given the repertoire. Collapses the dimensions of the repertoire that correspond to the purview nodes onto their state. All other dimensions are already singular and thus receive 0 as the conditioning index. Returns: float: A single probability.
[ "Compute", "the", "probability", "of", "the", "purview", "in", "its", "current", "state", "given", "the", "repertoire", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/actual.py#L217-L232
train
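A numpy sketch of the indexing trick described above: purview dimensions are indexed with the node's state, all other (singleton) dimensions with 0. The repertoire and states here are hypothetical.
import numpy as np

repertoire = np.array([0.25, 0.75]).reshape(1, 2, 1)   # only node 1 is in the purview
purview = (1,)
purview_state = (0, 1, 0)                               # hypothetical purview-side state
index = tuple(s if n in purview else 0
              for n, s in enumerate(purview_state))
print(index, repertoire[index])                         # (0, 1, 0) 0.75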
wmayner/pyphi
pyphi/actual.py
Transition.probability
def probability(self, direction, mechanism, purview): """Probability that the purview is in it's current state given the state of the mechanism. """ repertoire = self.repertoire(direction, mechanism, purview) return self.state_probability(direction, repertoire, purview)
python
def probability(self, direction, mechanism, purview): """Probability that the purview is in it's current state given the state of the mechanism. """ repertoire = self.repertoire(direction, mechanism, purview) return self.state_probability(direction, repertoire, purview)
[ "def", "probability", "(", "self", ",", "direction", ",", "mechanism", ",", "purview", ")", ":", "repertoire", "=", "self", ".", "repertoire", "(", "direction", ",", "mechanism", ",", "purview", ")", "return", "self", ".", "state_probability", "(", "direction", ",", "repertoire", ",", "purview", ")" ]
Probability that the purview is in its current state given the state of the mechanism.
[ "Probability", "that", "the", "purview", "is", "in", "it", "s", "current", "state", "given", "the", "state", "of", "the", "mechanism", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/actual.py#L234-L240
train
wmayner/pyphi
pyphi/actual.py
Transition.purview_state
def purview_state(self, direction): """The state of the purview when we are computing coefficients in ``direction``. For example, if we are computing the cause coefficient of a mechanism in ``after_state``, the direction is ``CAUSE`` and the ``purview_state`` is ``before_state``. """ return { Direction.CAUSE: self.before_state, Direction.EFFECT: self.after_state }[direction]
python
def purview_state(self, direction): """The state of the purview when we are computing coefficients in ``direction``. For example, if we are computing the cause coefficient of a mechanism in ``after_state``, the direction is ``CAUSE`` and the ``purview_state`` is ``before_state``. """ return { Direction.CAUSE: self.before_state, Direction.EFFECT: self.after_state }[direction]
[ "def", "purview_state", "(", "self", ",", "direction", ")", ":", "return", "{", "Direction", ".", "CAUSE", ":", "self", ".", "before_state", ",", "Direction", ".", "EFFECT", ":", "self", ".", "after_state", "}", "[", "direction", "]" ]
The state of the purview when we are computing coefficients in ``direction``. For example, if we are computing the cause coefficient of a mechanism in ``after_state``, the direction is ``CAUSE`` and the ``purview_state`` is ``before_state``.
[ "The", "state", "of", "the", "purview", "when", "we", "are", "computing", "coefficients", "in", "direction", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/actual.py#L246-L257
train
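A minimal sketch of the direction-to-state mapping described in the purview_state record, using a stand-in Direction enum and hypothetical before/after states (not the real Transition object):

from enum import Enum

class Direction(Enum):
    CAUSE = 0
    EFFECT = 1

before_state, after_state = (1, 0, 0), (0, 1, 1)  # hypothetical transition states

def purview_state(direction):
    # CAUSE evaluates the purview in the before-state; EFFECT in the after-state.
    return {Direction.CAUSE: before_state,
            Direction.EFFECT: after_state}[direction]

print(purview_state(Direction.CAUSE))   # (1, 0, 0)
print(purview_state(Direction.EFFECT))  # (0, 1, 1)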
wmayner/pyphi
pyphi/actual.py
Transition.mechanism_indices
def mechanism_indices(self, direction): """The indices of nodes in the mechanism system.""" return { Direction.CAUSE: self.effect_indices, Direction.EFFECT: self.cause_indices }[direction]
python
def mechanism_indices(self, direction): """The indices of nodes in the mechanism system.""" return { Direction.CAUSE: self.effect_indices, Direction.EFFECT: self.cause_indices }[direction]
[ "def", "mechanism_indices", "(", "self", ",", "direction", ")", ":", "return", "{", "Direction", ".", "CAUSE", ":", "self", ".", "effect_indices", ",", "Direction", ".", "EFFECT", ":", "self", ".", "cause_indices", "}", "[", "direction", "]" ]
The indices of nodes in the mechanism system.
[ "The", "indices", "of", "nodes", "in", "the", "mechanism", "system", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/actual.py#L265-L270
train
wmayner/pyphi
pyphi/actual.py
Transition.purview_indices
def purview_indices(self, direction): """The indices of nodes in the purview system.""" return { Direction.CAUSE: self.cause_indices, Direction.EFFECT: self.effect_indices }[direction]
python
def purview_indices(self, direction): """The indices of nodes in the purview system.""" return { Direction.CAUSE: self.cause_indices, Direction.EFFECT: self.effect_indices }[direction]
[ "def", "purview_indices", "(", "self", ",", "direction", ")", ":", "return", "{", "Direction", ".", "CAUSE", ":", "self", ".", "cause_indices", ",", "Direction", ".", "EFFECT", ":", "self", ".", "effect_indices", "}", "[", "direction", "]" ]
The indices of nodes in the purview system.
[ "The", "indices", "of", "nodes", "in", "the", "purview", "system", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/actual.py#L272-L277
train
wmayner/pyphi
pyphi/actual.py
Transition.cause_ratio
def cause_ratio(self, mechanism, purview): """The cause ratio of the ``purview`` given ``mechanism``.""" return self._ratio(Direction.CAUSE, mechanism, purview)
python
def cause_ratio(self, mechanism, purview): """The cause ratio of the ``purview`` given ``mechanism``.""" return self._ratio(Direction.CAUSE, mechanism, purview)
[ "def", "cause_ratio", "(", "self", ",", "mechanism", ",", "purview", ")", ":", "return", "self", ".", "_ratio", "(", "Direction", ".", "CAUSE", ",", "mechanism", ",", "purview", ")" ]
The cause ratio of the ``purview`` given ``mechanism``.
[ "The", "cause", "ratio", "of", "the", "purview", "given", "mechanism", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/actual.py#L283-L285
train
wmayner/pyphi
pyphi/actual.py
Transition.effect_ratio
def effect_ratio(self, mechanism, purview): """The effect ratio of the ``purview`` given ``mechanism``.""" return self._ratio(Direction.EFFECT, mechanism, purview)
python
def effect_ratio(self, mechanism, purview): """The effect ratio of the ``purview`` given ``mechanism``.""" return self._ratio(Direction.EFFECT, mechanism, purview)
[ "def", "effect_ratio", "(", "self", ",", "mechanism", ",", "purview", ")", ":", "return", "self", ".", "_ratio", "(", "Direction", ".", "EFFECT", ",", "mechanism", ",", "purview", ")" ]
The effect ratio of the ``purview`` given ``mechanism``.
[ "The", "effect", "ratio", "of", "the", "purview", "given", "mechanism", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/actual.py#L287-L289
train
wmayner/pyphi
pyphi/actual.py
Transition.partitioned_repertoire
def partitioned_repertoire(self, direction, partition): """Compute the repertoire over the partition in the given direction.""" system = self.system[direction] return system.partitioned_repertoire(direction, partition)
python
def partitioned_repertoire(self, direction, partition): """Compute the repertoire over the partition in the given direction.""" system = self.system[direction] return system.partitioned_repertoire(direction, partition)
[ "def", "partitioned_repertoire", "(", "self", ",", "direction", ",", "partition", ")", ":", "system", "=", "self", ".", "system", "[", "direction", "]", "return", "system", ".", "partitioned_repertoire", "(", "direction", ",", "partition", ")" ]
Compute the repertoire over the partition in the given direction.
[ "Compute", "the", "repertoire", "over", "the", "partition", "in", "the", "given", "direction", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/actual.py#L291-L294
train
wmayner/pyphi
pyphi/actual.py
Transition.partitioned_probability
def partitioned_probability(self, direction, partition): """Compute the probability of the mechanism over the purview in the partition. """ repertoire = self.partitioned_repertoire(direction, partition) return self.state_probability(direction, repertoire, partition.purview)
python
def partitioned_probability(self, direction, partition): """Compute the probability of the mechanism over the purview in the partition. """ repertoire = self.partitioned_repertoire(direction, partition) return self.state_probability(direction, repertoire, partition.purview)
[ "def", "partitioned_probability", "(", "self", ",", "direction", ",", "partition", ")", ":", "repertoire", "=", "self", ".", "partitioned_repertoire", "(", "direction", ",", "partition", ")", "return", "self", ".", "state_probability", "(", "direction", ",", "repertoire", ",", "partition", ".", "purview", ")" ]
Compute the probability of the mechanism over the purview in the partition.
[ "Compute", "the", "probability", "of", "the", "mechanism", "over", "the", "purview", "in", "the", "partition", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/actual.py#L296-L301
train
wmayner/pyphi
pyphi/actual.py
Transition.find_mip
def find_mip(self, direction, mechanism, purview, allow_neg=False): """Find the ratio minimum information partition for a mechanism over a purview. Args: direction (str): |CAUSE| or |EFFECT| mechanism (tuple[int]): A mechanism. purview (tuple[int]): A purview. Keyword Args: allow_neg (boolean): If true, ``alpha`` is allowed to be negative. Otherwise, negative values of ``alpha`` will be treated as if they were 0. Returns: AcRepertoireIrreducibilityAnalysis: The irreducibility analysis for the mechanism. """ alpha_min = float('inf') probability = self.probability(direction, mechanism, purview) for partition in mip_partitions(mechanism, purview, self.node_labels): partitioned_probability = self.partitioned_probability( direction, partition) alpha = log2(probability / partitioned_probability) # First check for 0 # Default: don't count contrary causes and effects if utils.eq(alpha, 0) or (alpha < 0 and not allow_neg): return AcRepertoireIrreducibilityAnalysis( state=self.mechanism_state(direction), direction=direction, mechanism=mechanism, purview=purview, partition=partition, probability=probability, partitioned_probability=partitioned_probability, node_labels=self.node_labels, alpha=0.0 ) # Then take closest to 0 if (abs(alpha_min) - abs(alpha)) > constants.EPSILON: alpha_min = alpha acria = AcRepertoireIrreducibilityAnalysis( state=self.mechanism_state(direction), direction=direction, mechanism=mechanism, purview=purview, partition=partition, probability=probability, partitioned_probability=partitioned_probability, node_labels=self.node_labels, alpha=alpha_min ) return acria
python
def find_mip(self, direction, mechanism, purview, allow_neg=False): """Find the ratio minimum information partition for a mechanism over a purview. Args: direction (str): |CAUSE| or |EFFECT| mechanism (tuple[int]): A mechanism. purview (tuple[int]): A purview. Keyword Args: allow_neg (boolean): If true, ``alpha`` is allowed to be negative. Otherwise, negative values of ``alpha`` will be treated as if they were 0. Returns: AcRepertoireIrreducibilityAnalysis: The irreducibility analysis for the mechanism. """ alpha_min = float('inf') probability = self.probability(direction, mechanism, purview) for partition in mip_partitions(mechanism, purview, self.node_labels): partitioned_probability = self.partitioned_probability( direction, partition) alpha = log2(probability / partitioned_probability) # First check for 0 # Default: don't count contrary causes and effects if utils.eq(alpha, 0) or (alpha < 0 and not allow_neg): return AcRepertoireIrreducibilityAnalysis( state=self.mechanism_state(direction), direction=direction, mechanism=mechanism, purview=purview, partition=partition, probability=probability, partitioned_probability=partitioned_probability, node_labels=self.node_labels, alpha=0.0 ) # Then take closest to 0 if (abs(alpha_min) - abs(alpha)) > constants.EPSILON: alpha_min = alpha acria = AcRepertoireIrreducibilityAnalysis( state=self.mechanism_state(direction), direction=direction, mechanism=mechanism, purview=purview, partition=partition, probability=probability, partitioned_probability=partitioned_probability, node_labels=self.node_labels, alpha=alpha_min ) return acria
[ "def", "find_mip", "(", "self", ",", "direction", ",", "mechanism", ",", "purview", ",", "allow_neg", "=", "False", ")", ":", "alpha_min", "=", "float", "(", "'inf'", ")", "probability", "=", "self", ".", "probability", "(", "direction", ",", "mechanism", ",", "purview", ")", "for", "partition", "in", "mip_partitions", "(", "mechanism", ",", "purview", ",", "self", ".", "node_labels", ")", ":", "partitioned_probability", "=", "self", ".", "partitioned_probability", "(", "direction", ",", "partition", ")", "alpha", "=", "log2", "(", "probability", "/", "partitioned_probability", ")", "# First check for 0", "# Default: don't count contrary causes and effects", "if", "utils", ".", "eq", "(", "alpha", ",", "0", ")", "or", "(", "alpha", "<", "0", "and", "not", "allow_neg", ")", ":", "return", "AcRepertoireIrreducibilityAnalysis", "(", "state", "=", "self", ".", "mechanism_state", "(", "direction", ")", ",", "direction", "=", "direction", ",", "mechanism", "=", "mechanism", ",", "purview", "=", "purview", ",", "partition", "=", "partition", ",", "probability", "=", "probability", ",", "partitioned_probability", "=", "partitioned_probability", ",", "node_labels", "=", "self", ".", "node_labels", ",", "alpha", "=", "0.0", ")", "# Then take closest to 0", "if", "(", "abs", "(", "alpha_min", ")", "-", "abs", "(", "alpha", ")", ")", ">", "constants", ".", "EPSILON", ":", "alpha_min", "=", "alpha", "acria", "=", "AcRepertoireIrreducibilityAnalysis", "(", "state", "=", "self", ".", "mechanism_state", "(", "direction", ")", ",", "direction", "=", "direction", ",", "mechanism", "=", "mechanism", ",", "purview", "=", "purview", ",", "partition", "=", "partition", ",", "probability", "=", "probability", ",", "partitioned_probability", "=", "partitioned_probability", ",", "node_labels", "=", "self", ".", "node_labels", ",", "alpha", "=", "alpha_min", ")", "return", "acria" ]
Find the ratio minimum information partition for a mechanism over a purview. Args: direction (str): |CAUSE| or |EFFECT| mechanism (tuple[int]): A mechanism. purview (tuple[int]): A purview. Keyword Args: allow_neg (boolean): If true, ``alpha`` is allowed to be negative. Otherwise, negative values of ``alpha`` will be treated as if they were 0. Returns: AcRepertoireIrreducibilityAnalysis: The irreducibility analysis for the mechanism.
[ "Find", "the", "ratio", "minimum", "information", "partition", "for", "a", "mechanism", "over", "a", "purview", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/actual.py#L307-L361
train
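The find_mip record above scores each partition with alpha = log2(probability / partitioned_probability), short-circuits as soon as a partition makes the mechanism reducible (alpha of 0, or negative when contrary causes and effects are excluded), and otherwise keeps the alpha closest to zero. A simplified, dependency-free sketch of that selection loop over hypothetical probabilities (the EPSILON value is assumed):

from math import log2

EPSILON = 1e-6  # assumed tolerance, standing in for constants.EPSILON

def select_alpha(probability, partitioned_probabilities, allow_neg=False):
    # Short-circuit on reducibility; otherwise keep the alpha closest to zero.
    alpha_min = float('inf')
    for pp in partitioned_probabilities:
        alpha = log2(probability / pp)
        if abs(alpha) < EPSILON or (alpha < 0 and not allow_neg):
            return 0.0
        if abs(alpha_min) - abs(alpha) > EPSILON:
            alpha_min = alpha
    return alpha_min

# Hypothetical unpartitioned and partitioned probabilities for one purview.
print(select_alpha(0.5, [0.25, 0.4]))  # ~0.32, from the partition closest to reducing it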
wmayner/pyphi
pyphi/actual.py
Transition.find_causal_link
def find_causal_link(self, direction, mechanism, purviews=False, allow_neg=False): """Return the maximally irreducible cause or effect ratio for a mechanism. Args: direction (str): The temporal direction, specifying cause or effect. mechanism (tuple[int]): The mechanism to be tested for irreducibility. Keyword Args: purviews (tuple[int]): Optionally restrict the possible purviews to a subset of the subsystem. This may be useful for _e.g._ finding only concepts that are "about" a certain subset of nodes. Returns: CausalLink: The maximally-irreducible actual cause or effect. """ purviews = self.potential_purviews(direction, mechanism, purviews) # Find the maximal RIA over the remaining purviews. if not purviews: max_ria = _null_ac_ria(self.mechanism_state(direction), direction, mechanism, None) else: # This max should be most positive max_ria = max(self.find_mip(direction, mechanism, purview, allow_neg) for purview in purviews) # Construct the corresponding CausalLink return CausalLink(max_ria)
python
def find_causal_link(self, direction, mechanism, purviews=False, allow_neg=False): """Return the maximally irreducible cause or effect ratio for a mechanism. Args: direction (str): The temporal direction, specifying cause or effect. mechanism (tuple[int]): The mechanism to be tested for irreducibility. Keyword Args: purviews (tuple[int]): Optionally restrict the possible purviews to a subset of the subsystem. This may be useful for _e.g._ finding only concepts that are "about" a certain subset of nodes. Returns: CausalLink: The maximally-irreducible actual cause or effect. """ purviews = self.potential_purviews(direction, mechanism, purviews) # Find the maximal RIA over the remaining purviews. if not purviews: max_ria = _null_ac_ria(self.mechanism_state(direction), direction, mechanism, None) else: # This max should be most positive max_ria = max(self.find_mip(direction, mechanism, purview, allow_neg) for purview in purviews) # Construct the corresponding CausalLink return CausalLink(max_ria)
[ "def", "find_causal_link", "(", "self", ",", "direction", ",", "mechanism", ",", "purviews", "=", "False", ",", "allow_neg", "=", "False", ")", ":", "purviews", "=", "self", ".", "potential_purviews", "(", "direction", ",", "mechanism", ",", "purviews", ")", "# Find the maximal RIA over the remaining purviews.", "if", "not", "purviews", ":", "max_ria", "=", "_null_ac_ria", "(", "self", ".", "mechanism_state", "(", "direction", ")", ",", "direction", ",", "mechanism", ",", "None", ")", "else", ":", "# This max should be most positive", "max_ria", "=", "max", "(", "self", ".", "find_mip", "(", "direction", ",", "mechanism", ",", "purview", ",", "allow_neg", ")", "for", "purview", "in", "purviews", ")", "# Construct the corresponding CausalLink", "return", "CausalLink", "(", "max_ria", ")" ]
Return the maximally irreducible cause or effect ratio for a mechanism. Args: direction (str): The temporal direction, specifying cause or effect. mechanism (tuple[int]): The mechanism to be tested for irreducibility. Keyword Args: purviews (tuple[int]): Optionally restrict the possible purviews to a subset of the subsystem. This may be useful for _e.g._ finding only concepts that are "about" a certain subset of nodes. Returns: CausalLink: The maximally-irreducible actual cause or effect.
[ "Return", "the", "maximally", "irreducible", "cause", "or", "effect", "ratio", "for", "a", "mechanism", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/actual.py#L387-L420
train
wmayner/pyphi
pyphi/actual.py
Transition.find_actual_cause
def find_actual_cause(self, mechanism, purviews=False): """Return the actual cause of a mechanism.""" return self.find_causal_link(Direction.CAUSE, mechanism, purviews)
python
def find_actual_cause(self, mechanism, purviews=False): """Return the actual cause of a mechanism.""" return self.find_causal_link(Direction.CAUSE, mechanism, purviews)
[ "def", "find_actual_cause", "(", "self", ",", "mechanism", ",", "purviews", "=", "False", ")", ":", "return", "self", ".", "find_causal_link", "(", "Direction", ".", "CAUSE", ",", "mechanism", ",", "purviews", ")" ]
Return the actual cause of a mechanism.
[ "Return", "the", "actual", "cause", "of", "a", "mechanism", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/actual.py#L422-L424
train
wmayner/pyphi
pyphi/actual.py
Transition.find_actual_effect
def find_actual_effect(self, mechanism, purviews=False): """Return the actual effect of a mechanism.""" return self.find_causal_link(Direction.EFFECT, mechanism, purviews)
python
def find_actual_effect(self, mechanism, purviews=False): """Return the actual effect of a mechanism.""" return self.find_causal_link(Direction.EFFECT, mechanism, purviews)
[ "def", "find_actual_effect", "(", "self", ",", "mechanism", ",", "purviews", "=", "False", ")", ":", "return", "self", ".", "find_causal_link", "(", "Direction", ".", "EFFECT", ",", "mechanism", ",", "purviews", ")" ]
Return the actual effect of a mechanism.
[ "Return", "the", "actual", "effect", "of", "a", "mechanism", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/actual.py#L426-L428
train
wmayner/pyphi
pyphi/db.py
find
def find(key): """Return the value associated with a key. If there is no value with the given key, returns ``None``. """ docs = list(collection.find({KEY_FIELD: key})) # Return None if we didn't find anything. if not docs: return None pickled_value = docs[0][VALUE_FIELD] # Unpickle and return the value. return pickle.loads(pickled_value)
python
def find(key): """Return the value associated with a key. If there is no value with the given key, returns ``None``. """ docs = list(collection.find({KEY_FIELD: key})) # Return None if we didn't find anything. if not docs: return None pickled_value = docs[0][VALUE_FIELD] # Unpickle and return the value. return pickle.loads(pickled_value)
[ "def", "find", "(", "key", ")", ":", "docs", "=", "list", "(", "collection", ".", "find", "(", "{", "KEY_FIELD", ":", "key", "}", ")", ")", "# Return None if we didn't find anything.", "if", "not", "docs", ":", "return", "None", "pickled_value", "=", "docs", "[", "0", "]", "[", "VALUE_FIELD", "]", "# Unpickle and return the value.", "return", "pickle", ".", "loads", "(", "pickled_value", ")" ]
Return the value associated with a key. If there is no value with the given key, returns ``None``.
[ "Return", "the", "value", "associated", "with", "a", "key", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/db.py#L34-L45
train
wmayner/pyphi
pyphi/db.py
insert
def insert(key, value): """Store a value with a key. If the key is already present in the database, this does nothing. """ # Pickle the value. value = pickle.dumps(value, protocol=constants.PICKLE_PROTOCOL) # Store the value as binary data in a document. doc = { KEY_FIELD: key, VALUE_FIELD: Binary(value) } # Pickle and store the value with its key. If the key already exists, we # don't insert (since the key is a unique index), and we don't care. try: return collection.insert(doc) except pymongo.errors.DuplicateKeyError: return None
python
def insert(key, value): """Store a value with a key. If the key is already present in the database, this does nothing. """ # Pickle the value. value = pickle.dumps(value, protocol=constants.PICKLE_PROTOCOL) # Store the value as binary data in a document. doc = { KEY_FIELD: key, VALUE_FIELD: Binary(value) } # Pickle and store the value with its key. If the key already exists, we # don't insert (since the key is a unique index), and we don't care. try: return collection.insert(doc) except pymongo.errors.DuplicateKeyError: return None
[ "def", "insert", "(", "key", ",", "value", ")", ":", "# Pickle the value.", "value", "=", "pickle", ".", "dumps", "(", "value", ",", "protocol", "=", "constants", ".", "PICKLE_PROTOCOL", ")", "# Store the value as binary data in a document.", "doc", "=", "{", "KEY_FIELD", ":", "key", ",", "VALUE_FIELD", ":", "Binary", "(", "value", ")", "}", "# Pickle and store the value with its key. If the key already exists, we", "# don't insert (since the key is a unique index), and we don't care.", "try", ":", "return", "collection", ".", "insert", "(", "doc", ")", "except", "pymongo", ".", "errors", ".", "DuplicateKeyError", ":", "return", "None" ]
Store a value with a key. If the key is already present in the database, this does nothing.
[ "Store", "a", "value", "with", "a", "key", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/db.py#L48-L65
train
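The db.find and db.insert records implement a pickle-backed key-value cache on a MongoDB collection with a unique key index. A rough sketch of the same round trip with a plain dict standing in for the collection, so it runs without a database (the field names here are placeholders, not the module's real constants):

import pickle

KEY_FIELD, VALUE_FIELD = 'k', 'v'  # placeholder field names
collection = {}                    # dict standing in for the MongoDB collection

def insert(key, value):
    if key in collection:          # mimics the DuplicateKeyError path of a unique index
        return None
    collection[key] = {KEY_FIELD: key, VALUE_FIELD: pickle.dumps(value)}
    return key

def find(key):
    doc = collection.get(key)
    return None if doc is None else pickle.loads(doc[VALUE_FIELD])

insert(42, {'phi': 0.5})
print(find(42))  # {'phi': 0.5}
print(find(99))  # None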
wmayner/pyphi
pyphi/db.py
generate_key
def generate_key(filtered_args): """Get a key from some input. This function should be used whenever a key is needed, to keep keys consistent. """ # Convert the value to a (potentially singleton) tuple to be consistent # with joblib.filtered_args. if isinstance(filtered_args, Iterable): return hash(tuple(filtered_args)) return hash((filtered_args,))
python
def generate_key(filtered_args): """Get a key from some input. This function should be used whenever a key is needed, to keep keys consistent. """ # Convert the value to a (potentially singleton) tuple to be consistent # with joblib.filtered_args. if isinstance(filtered_args, Iterable): return hash(tuple(filtered_args)) return hash((filtered_args,))
[ "def", "generate_key", "(", "filtered_args", ")", ":", "# Convert the value to a (potentially singleton) tuple to be consistent", "# with joblib.filtered_args.", "if", "isinstance", "(", "filtered_args", ",", "Iterable", ")", ":", "return", "hash", "(", "tuple", "(", "filtered_args", ")", ")", "return", "hash", "(", "(", "filtered_args", ",", ")", ")" ]
Get a key from some input. This function should be used whenever a key is needed, to keep keys consistent.
[ "Get", "a", "key", "from", "some", "input", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/db.py#L69-L79
train
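Quick usage sketch for the key-generation rule above: iterables hash as tuples, anything else is wrapped in a singleton tuple (importing Iterable from collections.abc is an assumption about the Python version):

from collections.abc import Iterable

def generate_key(filtered_args):
    if isinstance(filtered_args, Iterable):
        return hash(tuple(filtered_args))
    return hash((filtered_args,))

print(generate_key([1, 2, 3]) == hash((1, 2, 3)))  # True
print(generate_key(7) == hash((7,)))               # True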
wmayner/pyphi
pyphi/memory.py
cache
def cache(ignore=None): """Decorator for memoizing a function using either the filesystem or a database. """ def decorator(func): # Initialize both cached versions joblib_cached = constants.joblib_memory.cache(func, ignore=ignore) db_cached = DbMemoizedFunc(func, ignore) @functools.wraps(func) def wrapper(*args, **kwargs): """Dynamically choose the cache at call-time, not at import.""" if func.__name__ == '_sia' and not config.CACHE_SIAS: f = func elif config.CACHING_BACKEND == 'fs': f = joblib_cached elif config.CACHING_BACKEND == 'db': f = db_cached return f(*args, **kwargs) return wrapper return decorator
python
def cache(ignore=None): """Decorator for memoizing a function using either the filesystem or a database. """ def decorator(func): # Initialize both cached versions joblib_cached = constants.joblib_memory.cache(func, ignore=ignore) db_cached = DbMemoizedFunc(func, ignore) @functools.wraps(func) def wrapper(*args, **kwargs): """Dynamically choose the cache at call-time, not at import.""" if func.__name__ == '_sia' and not config.CACHE_SIAS: f = func elif config.CACHING_BACKEND == 'fs': f = joblib_cached elif config.CACHING_BACKEND == 'db': f = db_cached return f(*args, **kwargs) return wrapper return decorator
[ "def", "cache", "(", "ignore", "=", "None", ")", ":", "def", "decorator", "(", "func", ")", ":", "# Initialize both cached versions", "joblib_cached", "=", "constants", ".", "joblib_memory", ".", "cache", "(", "func", ",", "ignore", "=", "ignore", ")", "db_cached", "=", "DbMemoizedFunc", "(", "func", ",", "ignore", ")", "@", "functools", ".", "wraps", "(", "func", ")", "def", "wrapper", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "\"\"\"Dynamically choose the cache at call-time, not at import.\"\"\"", "if", "func", ".", "__name__", "==", "'_sia'", "and", "not", "config", ".", "CACHE_SIAS", ":", "f", "=", "func", "elif", "config", ".", "CACHING_BACKEND", "==", "'fs'", ":", "f", "=", "joblib_cached", "elif", "config", ".", "CACHING_BACKEND", "==", "'db'", ":", "f", "=", "db_cached", "return", "f", "(", "*", "args", ",", "*", "*", "kwargs", ")", "return", "wrapper", "return", "decorator" ]
Decorator for memoizing a function using either the filesystem or a database.
[ "Decorator", "for", "memoizing", "a", "function", "using", "either", "the", "filesystem", "or", "a", "database", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/memory.py#L18-L39
train
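The cache decorator above picks its backend at call time rather than at import, so changing the configuration later still takes effect. A simplified, dependency-free sketch of that dispatch pattern (the config object and the two dict-backed caches are stand-ins, not PyPhi's real backends):

import functools

class config:  # stand-in for pyphi.config
    CACHING_BACKEND = 'fs'

fs_cache, db_cache = {}, {}

def cache():
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args):
            # The backend is chosen on every call, not once at decoration time.
            backend = fs_cache if config.CACHING_BACKEND == 'fs' else db_cache
            if args not in backend:
                backend[args] = func(*args)
            return backend[args]
        return wrapper
    return decorator

@cache()
def square(x):
    return x * x

print(square(3))                   # 9, stored in fs_cache
config.CACHING_BACKEND = 'db'
print(square(3))                   # 9, recomputed into db_cache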
wmayner/pyphi
pyphi/memory.py
DbMemoizedFunc.get_output_key
def get_output_key(self, args, kwargs): """Return the key that the output should be cached with, given arguments, keyword arguments, and a list of arguments to ignore. """ # Get a dictionary mapping argument names to argument values where # ignored arguments are omitted. filtered_args = joblib.func_inspect.filter_args( self.func, self.ignore, args, kwargs) # Get a sorted tuple of the filtered argument. filtered_args = tuple(sorted(filtered_args.values())) # Use native hash when hashing arguments. return db.generate_key(filtered_args)
python
def get_output_key(self, args, kwargs): """Return the key that the output should be cached with, given arguments, keyword arguments, and a list of arguments to ignore. """ # Get a dictionary mapping argument names to argument values where # ignored arguments are omitted. filtered_args = joblib.func_inspect.filter_args( self.func, self.ignore, args, kwargs) # Get a sorted tuple of the filtered argument. filtered_args = tuple(sorted(filtered_args.values())) # Use native hash when hashing arguments. return db.generate_key(filtered_args)
[ "def", "get_output_key", "(", "self", ",", "args", ",", "kwargs", ")", ":", "# Get a dictionary mapping argument names to argument values where", "# ignored arguments are omitted.", "filtered_args", "=", "joblib", ".", "func_inspect", ".", "filter_args", "(", "self", ".", "func", ",", "self", ".", "ignore", ",", "args", ",", "kwargs", ")", "# Get a sorted tuple of the filtered argument.", "filtered_args", "=", "tuple", "(", "sorted", "(", "filtered_args", ".", "values", "(", ")", ")", ")", "# Use native hash when hashing arguments.", "return", "db", ".", "generate_key", "(", "filtered_args", ")" ]
Return the key that the output should be cached with, given arguments, keyword arguments, and a list of arguments to ignore.
[ "Return", "the", "key", "that", "the", "output", "should", "be", "cached", "with", "given", "arguments", "keyword", "arguments", "and", "a", "list", "of", "arguments", "to", "ignore", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/memory.py#L73-L84
train
wmayner/pyphi
pyphi/memory.py
DbMemoizedFunc.load_output
def load_output(self, args, kwargs): """Return cached output.""" return db.find(self.get_output_key(args, kwargs))
python
def load_output(self, args, kwargs): """Return cached output.""" return db.find(self.get_output_key(args, kwargs))
[ "def", "load_output", "(", "self", ",", "args", ",", "kwargs", ")", ":", "return", "db", ".", "find", "(", "self", ".", "get_output_key", "(", "args", ",", "kwargs", ")", ")" ]
Return cached output.
[ "Return", "cached", "output", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/memory.py#L86-L88
train
wmayner/pyphi
pyphi/subsystem.py
Subsystem.cache_info
def cache_info(self): """Report repertoire cache statistics.""" return { 'single_node_repertoire': self._single_node_repertoire_cache.info(), 'repertoire': self._repertoire_cache.info(), 'mice': self._mice_cache.info() }
python
def cache_info(self): """Report repertoire cache statistics.""" return { 'single_node_repertoire': self._single_node_repertoire_cache.info(), 'repertoire': self._repertoire_cache.info(), 'mice': self._mice_cache.info() }
[ "def", "cache_info", "(", "self", ")", ":", "return", "{", "'single_node_repertoire'", ":", "self", ".", "_single_node_repertoire_cache", ".", "info", "(", ")", ",", "'repertoire'", ":", "self", ".", "_repertoire_cache", ".", "info", "(", ")", ",", "'mice'", ":", "self", ".", "_mice_cache", ".", "info", "(", ")", "}" ]
Report repertoire cache statistics.
[ "Report", "repertoire", "cache", "statistics", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/subsystem.py#L171-L178
train
wmayner/pyphi
pyphi/subsystem.py
Subsystem.clear_caches
def clear_caches(self): """Clear the mice and repertoire caches.""" self._single_node_repertoire_cache.clear() self._repertoire_cache.clear() self._mice_cache.clear()
python
def clear_caches(self): """Clear the mice and repertoire caches.""" self._single_node_repertoire_cache.clear() self._repertoire_cache.clear() self._mice_cache.clear()
[ "def", "clear_caches", "(", "self", ")", ":", "self", ".", "_single_node_repertoire_cache", ".", "clear", "(", ")", "self", ".", "_repertoire_cache", ".", "clear", "(", ")", "self", ".", "_mice_cache", ".", "clear", "(", ")" ]
Clear the mice and repertoire caches.
[ "Clear", "the", "mice", "and", "repertoire", "caches", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/subsystem.py#L180-L184
train
wmayner/pyphi
pyphi/subsystem.py
Subsystem.apply_cut
def apply_cut(self, cut): """Return a cut version of this |Subsystem|. Args: cut (Cut): The cut to apply to this |Subsystem|. Returns: Subsystem: The cut subsystem. """ return Subsystem(self.network, self.state, self.node_indices, cut=cut, mice_cache=self._mice_cache)
python
def apply_cut(self, cut): """Return a cut version of this |Subsystem|. Args: cut (Cut): The cut to apply to this |Subsystem|. Returns: Subsystem: The cut subsystem. """ return Subsystem(self.network, self.state, self.node_indices, cut=cut, mice_cache=self._mice_cache)
[ "def", "apply_cut", "(", "self", ",", "cut", ")", ":", "return", "Subsystem", "(", "self", ".", "network", ",", "self", ".", "state", ",", "self", ".", "node_indices", ",", "cut", "=", "cut", ",", "mice_cache", "=", "self", ".", "_mice_cache", ")" ]
Return a cut version of this |Subsystem|. Args: cut (Cut): The cut to apply to this |Subsystem|. Returns: Subsystem: The cut subsystem.
[ "Return", "a", "cut", "version", "of", "this", "|Subsystem|", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/subsystem.py#L247-L257
train
wmayner/pyphi
pyphi/subsystem.py
Subsystem.indices2nodes
def indices2nodes(self, indices): """Return |Nodes| for these indices. Args: indices (tuple[int]): The indices in question. Returns: tuple[Node]: The |Node| objects corresponding to these indices. Raises: ValueError: If requested indices are not in the subsystem. """ if set(indices) - set(self.node_indices): raise ValueError( "`indices` must be a subset of the Subsystem's indices.") return tuple(self._index2node[n] for n in indices)
python
def indices2nodes(self, indices): """Return |Nodes| for these indices. Args: indices (tuple[int]): The indices in question. Returns: tuple[Node]: The |Node| objects corresponding to these indices. Raises: ValueError: If requested indices are not in the subsystem. """ if set(indices) - set(self.node_indices): raise ValueError( "`indices` must be a subset of the Subsystem's indices.") return tuple(self._index2node[n] for n in indices)
[ "def", "indices2nodes", "(", "self", ",", "indices", ")", ":", "if", "set", "(", "indices", ")", "-", "set", "(", "self", ".", "node_indices", ")", ":", "raise", "ValueError", "(", "\"`indices` must be a subset of the Subsystem's indices.\"", ")", "return", "tuple", "(", "self", ".", "_index2node", "[", "n", "]", "for", "n", "in", "indices", ")" ]
Return |Nodes| for these indices. Args: indices (tuple[int]): The indices in question. Returns: tuple[Node]: The |Node| objects corresponding to these indices. Raises: ValueError: If requested indices are not in the subsystem.
[ "Return", "|Nodes|", "for", "these", "indices", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/subsystem.py#L259-L274
train
wmayner/pyphi
pyphi/subsystem.py
Subsystem.cause_repertoire
def cause_repertoire(self, mechanism, purview): """Return the cause repertoire of a mechanism over a purview. Args: mechanism (tuple[int]): The mechanism for which to calculate the cause repertoire. purview (tuple[int]): The purview over which to calculate the cause repertoire. Returns: np.ndarray: The cause repertoire of the mechanism over the purview. .. note:: The returned repertoire is a distribution over purview node states, not the states of the whole network. """ # If the purview is empty, the distribution is empty; return the # multiplicative identity. if not purview: return np.array([1.0]) # If the mechanism is empty, nothing is specified about the previous # state of the purview; return the purview's maximum entropy # distribution. if not mechanism: return max_entropy_distribution(purview, self.tpm_size) # Use a frozenset so the arguments to `_single_node_cause_repertoire` # can be hashed and cached. purview = frozenset(purview) # Preallocate the repertoire with the proper shape, so that # probabilities are broadcasted appropriately. joint = np.ones(repertoire_shape(purview, self.tpm_size)) # The cause repertoire is the product of the cause repertoires of the # individual nodes. joint *= functools.reduce( np.multiply, [self._single_node_cause_repertoire(m, purview) for m in mechanism] ) # The resulting joint distribution is over previous states, which are # rows in the TPM, so the distribution is a column. The columns of a # TPM don't necessarily sum to 1, so we normalize. return distribution.normalize(joint)
python
def cause_repertoire(self, mechanism, purview): """Return the cause repertoire of a mechanism over a purview. Args: mechanism (tuple[int]): The mechanism for which to calculate the cause repertoire. purview (tuple[int]): The purview over which to calculate the cause repertoire. Returns: np.ndarray: The cause repertoire of the mechanism over the purview. .. note:: The returned repertoire is a distribution over purview node states, not the states of the whole network. """ # If the purview is empty, the distribution is empty; return the # multiplicative identity. if not purview: return np.array([1.0]) # If the mechanism is empty, nothing is specified about the previous # state of the purview; return the purview's maximum entropy # distribution. if not mechanism: return max_entropy_distribution(purview, self.tpm_size) # Use a frozenset so the arguments to `_single_node_cause_repertoire` # can be hashed and cached. purview = frozenset(purview) # Preallocate the repertoire with the proper shape, so that # probabilities are broadcasted appropriately. joint = np.ones(repertoire_shape(purview, self.tpm_size)) # The cause repertoire is the product of the cause repertoires of the # individual nodes. joint *= functools.reduce( np.multiply, [self._single_node_cause_repertoire(m, purview) for m in mechanism] ) # The resulting joint distribution is over previous states, which are # rows in the TPM, so the distribution is a column. The columns of a # TPM don't necessarily sum to 1, so we normalize. return distribution.normalize(joint)
[ "def", "cause_repertoire", "(", "self", ",", "mechanism", ",", "purview", ")", ":", "# If the purview is empty, the distribution is empty; return the", "# multiplicative identity.", "if", "not", "purview", ":", "return", "np", ".", "array", "(", "[", "1.0", "]", ")", "# If the mechanism is empty, nothing is specified about the previous", "# state of the purview; return the purview's maximum entropy", "# distribution.", "if", "not", "mechanism", ":", "return", "max_entropy_distribution", "(", "purview", ",", "self", ".", "tpm_size", ")", "# Use a frozenset so the arguments to `_single_node_cause_repertoire`", "# can be hashed and cached.", "purview", "=", "frozenset", "(", "purview", ")", "# Preallocate the repertoire with the proper shape, so that", "# probabilities are broadcasted appropriately.", "joint", "=", "np", ".", "ones", "(", "repertoire_shape", "(", "purview", ",", "self", ".", "tpm_size", ")", ")", "# The cause repertoire is the product of the cause repertoires of the", "# individual nodes.", "joint", "*=", "functools", ".", "reduce", "(", "np", ".", "multiply", ",", "[", "self", ".", "_single_node_cause_repertoire", "(", "m", ",", "purview", ")", "for", "m", "in", "mechanism", "]", ")", "# The resulting joint distribution is over previous states, which are", "# rows in the TPM, so the distribution is a column. The columns of a", "# TPM don't necessarily sum to 1, so we normalize.", "return", "distribution", ".", "normalize", "(", "joint", ")" ]
Return the cause repertoire of a mechanism over a purview. Args: mechanism (tuple[int]): The mechanism for which to calculate the cause repertoire. purview (tuple[int]): The purview over which to calculate the cause repertoire. Returns: np.ndarray: The cause repertoire of the mechanism over the purview. .. note:: The returned repertoire is a distribution over purview node states, not the states of the whole network.
[ "Return", "the", "cause", "repertoire", "of", "a", "mechanism", "over", "a", "purview", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/subsystem.py#L290-L330
train
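The cause_repertoire record multiplies per-mechanism-node repertoires into a preallocated joint array (relying on NumPy broadcasting) and then normalizes, since TPM columns need not sum to 1. A minimal sketch of that multiply-and-normalize step with hypothetical single-node factors over a 2-node purview:

import functools
import numpy as np

def normalize(a):
    s = a.sum()
    return a / s if s else a

# Hypothetical single-node cause repertoires, shaped so multiplication
# broadcasts over the joint purview space.
factor_a = np.array([[0.2], [0.8]])   # constraint contributed by mechanism node A
factor_b = np.array([[0.5, 0.5]])     # constraint contributed by mechanism node B

joint = np.ones((2, 2))               # preallocated repertoire shape
joint *= functools.reduce(np.multiply, [factor_a, factor_b])
print(normalize(joint))               # [[0.1 0.1]
                                      #  [0.4 0.4]]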
wmayner/pyphi
pyphi/subsystem.py
Subsystem.effect_repertoire
def effect_repertoire(self, mechanism, purview): """Return the effect repertoire of a mechanism over a purview. Args: mechanism (tuple[int]): The mechanism for which to calculate the effect repertoire. purview (tuple[int]): The purview over which to calculate the effect repertoire. Returns: np.ndarray: The effect repertoire of the mechanism over the purview. .. note:: The returned repertoire is a distribution over purview node states, not the states of the whole network. """ # If the purview is empty, the distribution is empty, so return the # multiplicative identity. if not purview: return np.array([1.0]) # Use a frozenset so the arguments to `_single_node_effect_repertoire` # can be hashed and cached. mechanism = frozenset(mechanism) # Preallocate the repertoire with the proper shape, so that # probabilities are broadcasted appropriately. joint = np.ones(repertoire_shape(purview, self.tpm_size)) # The effect repertoire is the product of the effect repertoires of the # individual nodes. return joint * functools.reduce( np.multiply, [self._single_node_effect_repertoire(mechanism, p) for p in purview] )
python
def effect_repertoire(self, mechanism, purview): """Return the effect repertoire of a mechanism over a purview. Args: mechanism (tuple[int]): The mechanism for which to calculate the effect repertoire. purview (tuple[int]): The purview over which to calculate the effect repertoire. Returns: np.ndarray: The effect repertoire of the mechanism over the purview. .. note:: The returned repertoire is a distribution over purview node states, not the states of the whole network. """ # If the purview is empty, the distribution is empty, so return the # multiplicative identity. if not purview: return np.array([1.0]) # Use a frozenset so the arguments to `_single_node_effect_repertoire` # can be hashed and cached. mechanism = frozenset(mechanism) # Preallocate the repertoire with the proper shape, so that # probabilities are broadcasted appropriately. joint = np.ones(repertoire_shape(purview, self.tpm_size)) # The effect repertoire is the product of the effect repertoires of the # individual nodes. return joint * functools.reduce( np.multiply, [self._single_node_effect_repertoire(mechanism, p) for p in purview] )
[ "def", "effect_repertoire", "(", "self", ",", "mechanism", ",", "purview", ")", ":", "# If the purview is empty, the distribution is empty, so return the", "# multiplicative identity.", "if", "not", "purview", ":", "return", "np", ".", "array", "(", "[", "1.0", "]", ")", "# Use a frozenset so the arguments to `_single_node_effect_repertoire`", "# can be hashed and cached.", "mechanism", "=", "frozenset", "(", "mechanism", ")", "# Preallocate the repertoire with the proper shape, so that", "# probabilities are broadcasted appropriately.", "joint", "=", "np", ".", "ones", "(", "repertoire_shape", "(", "purview", ",", "self", ".", "tpm_size", ")", ")", "# The effect repertoire is the product of the effect repertoires of the", "# individual nodes.", "return", "joint", "*", "functools", ".", "reduce", "(", "np", ".", "multiply", ",", "[", "self", ".", "_single_node_effect_repertoire", "(", "mechanism", ",", "p", ")", "for", "p", "in", "purview", "]", ")" ]
Return the effect repertoire of a mechanism over a purview. Args: mechanism (tuple[int]): The mechanism for which to calculate the effect repertoire. purview (tuple[int]): The purview over which to calculate the effect repertoire. Returns: np.ndarray: The effect repertoire of the mechanism over the purview. .. note:: The returned repertoire is a distribution over purview node states, not the states of the whole network.
[ "Return", "the", "effect", "repertoire", "of", "a", "mechanism", "over", "a", "purview", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/subsystem.py#L348-L380
train
wmayner/pyphi
pyphi/subsystem.py
Subsystem.repertoire
def repertoire(self, direction, mechanism, purview): """Return the cause or effect repertoire based on a direction. Args: direction (Direction): |CAUSE| or |EFFECT|. mechanism (tuple[int]): The mechanism for which to calculate the repertoire. purview (tuple[int]): The purview over which to calculate the repertoire. Returns: np.ndarray: The cause or effect repertoire of the mechanism over the purview. Raises: ValueError: If ``direction`` is invalid. """ if direction == Direction.CAUSE: return self.cause_repertoire(mechanism, purview) elif direction == Direction.EFFECT: return self.effect_repertoire(mechanism, purview) return validate.direction(direction)
python
def repertoire(self, direction, mechanism, purview): """Return the cause or effect repertoire based on a direction. Args: direction (Direction): |CAUSE| or |EFFECT|. mechanism (tuple[int]): The mechanism for which to calculate the repertoire. purview (tuple[int]): The purview over which to calculate the repertoire. Returns: np.ndarray: The cause or effect repertoire of the mechanism over the purview. Raises: ValueError: If ``direction`` is invalid. """ if direction == Direction.CAUSE: return self.cause_repertoire(mechanism, purview) elif direction == Direction.EFFECT: return self.effect_repertoire(mechanism, purview) return validate.direction(direction)
[ "def", "repertoire", "(", "self", ",", "direction", ",", "mechanism", ",", "purview", ")", ":", "if", "direction", "==", "Direction", ".", "CAUSE", ":", "return", "self", ".", "cause_repertoire", "(", "mechanism", ",", "purview", ")", "elif", "direction", "==", "Direction", ".", "EFFECT", ":", "return", "self", ".", "effect_repertoire", "(", "mechanism", ",", "purview", ")", "return", "validate", ".", "direction", "(", "direction", ")" ]
Return the cause or effect repertoire based on a direction. Args: direction (Direction): |CAUSE| or |EFFECT|. mechanism (tuple[int]): The mechanism for which to calculate the repertoire. purview (tuple[int]): The purview over which to calculate the repertoire. Returns: np.ndarray: The cause or effect repertoire of the mechanism over the purview. Raises: ValueError: If ``direction`` is invalid.
[ "Return", "the", "cause", "or", "effect", "repertoire", "based", "on", "a", "direction", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/subsystem.py#L382-L404
train
wmayner/pyphi
pyphi/subsystem.py
Subsystem.partitioned_repertoire
def partitioned_repertoire(self, direction, partition): """Compute the repertoire of a partitioned mechanism and purview.""" repertoires = [ self.repertoire(direction, part.mechanism, part.purview) for part in partition ] return functools.reduce(np.multiply, repertoires)
python
def partitioned_repertoire(self, direction, partition): """Compute the repertoire of a partitioned mechanism and purview.""" repertoires = [ self.repertoire(direction, part.mechanism, part.purview) for part in partition ] return functools.reduce(np.multiply, repertoires)
[ "def", "partitioned_repertoire", "(", "self", ",", "direction", ",", "partition", ")", ":", "repertoires", "=", "[", "self", ".", "repertoire", "(", "direction", ",", "part", ".", "mechanism", ",", "part", ".", "purview", ")", "for", "part", "in", "partition", "]", "return", "functools", ".", "reduce", "(", "np", ".", "multiply", ",", "repertoires", ")" ]
Compute the repertoire of a partitioned mechanism and purview.
[ "Compute", "the", "repertoire", "of", "a", "partitioned", "mechanism", "and", "purview", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/subsystem.py#L424-L430
train
wmayner/pyphi
pyphi/subsystem.py
Subsystem.expand_repertoire
def expand_repertoire(self, direction, repertoire, new_purview=None): """Distribute an effect repertoire over a larger purview. Args: direction (Direction): |CAUSE| or |EFFECT|. repertoire (np.ndarray): The repertoire to expand. Keyword Args: new_purview (tuple[int]): The new purview to expand the repertoire over. If ``None`` (the default), the new purview is the entire network. Returns: np.ndarray: A distribution over the new purview, where probability is spread out over the new nodes. Raises: ValueError: If the expanded purview doesn't contain the original purview. """ if repertoire is None: return None purview = distribution.purview(repertoire) if new_purview is None: new_purview = self.node_indices # full subsystem if not set(purview).issubset(new_purview): raise ValueError("Expanded purview must contain original purview.") # Get the unconstrained repertoire over the other nodes in the network. non_purview_indices = tuple(set(new_purview) - set(purview)) uc = self.unconstrained_repertoire(direction, non_purview_indices) # Multiply the given repertoire by the unconstrained one to get a # distribution over all the nodes in the network. expanded_repertoire = repertoire * uc return distribution.normalize(expanded_repertoire)
python
def expand_repertoire(self, direction, repertoire, new_purview=None): """Distribute an effect repertoire over a larger purview. Args: direction (Direction): |CAUSE| or |EFFECT|. repertoire (np.ndarray): The repertoire to expand. Keyword Args: new_purview (tuple[int]): The new purview to expand the repertoire over. If ``None`` (the default), the new purview is the entire network. Returns: np.ndarray: A distribution over the new purview, where probability is spread out over the new nodes. Raises: ValueError: If the expanded purview doesn't contain the original purview. """ if repertoire is None: return None purview = distribution.purview(repertoire) if new_purview is None: new_purview = self.node_indices # full subsystem if not set(purview).issubset(new_purview): raise ValueError("Expanded purview must contain original purview.") # Get the unconstrained repertoire over the other nodes in the network. non_purview_indices = tuple(set(new_purview) - set(purview)) uc = self.unconstrained_repertoire(direction, non_purview_indices) # Multiply the given repertoire by the unconstrained one to get a # distribution over all the nodes in the network. expanded_repertoire = repertoire * uc return distribution.normalize(expanded_repertoire)
[ "def", "expand_repertoire", "(", "self", ",", "direction", ",", "repertoire", ",", "new_purview", "=", "None", ")", ":", "if", "repertoire", "is", "None", ":", "return", "None", "purview", "=", "distribution", ".", "purview", "(", "repertoire", ")", "if", "new_purview", "is", "None", ":", "new_purview", "=", "self", ".", "node_indices", "# full subsystem", "if", "not", "set", "(", "purview", ")", ".", "issubset", "(", "new_purview", ")", ":", "raise", "ValueError", "(", "\"Expanded purview must contain original purview.\"", ")", "# Get the unconstrained repertoire over the other nodes in the network.", "non_purview_indices", "=", "tuple", "(", "set", "(", "new_purview", ")", "-", "set", "(", "purview", ")", ")", "uc", "=", "self", ".", "unconstrained_repertoire", "(", "direction", ",", "non_purview_indices", ")", "# Multiply the given repertoire by the unconstrained one to get a", "# distribution over all the nodes in the network.", "expanded_repertoire", "=", "repertoire", "*", "uc", "return", "distribution", ".", "normalize", "(", "expanded_repertoire", ")" ]
Distribute an effect repertoire over a larger purview. Args: direction (Direction): |CAUSE| or |EFFECT|. repertoire (np.ndarray): The repertoire to expand. Keyword Args: new_purview (tuple[int]): The new purview to expand the repertoire over. If ``None`` (the default), the new purview is the entire network. Returns: np.ndarray: A distribution over the new purview, where probability is spread out over the new nodes. Raises: ValueError: If the expanded purview doesn't contain the original purview.
[ "Distribute", "an", "effect", "repertoire", "over", "a", "larger", "purview", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/subsystem.py#L432-L470
train
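expand_repertoire grows a repertoire onto extra purview nodes by multiplying it with the unconstrained repertoire over those nodes and renormalizing. A minimal NumPy sketch of that step, assuming a maximum-entropy unconstrained distribution over the added node (which holds for causes; for effects the unconstrained distribution generally is not uniform):

import numpy as np

def normalize(a):
    s = a.sum()
    return a / s if s else a

# Hypothetical repertoire over purview node 0 of a 2-node system.
repertoire = np.array([[0.25], [0.75]])     # shape (2, 1); node 1 is a singleton

# Unconstrained distribution over the newly added node 1.
unconstrained = np.array([[0.5, 0.5]])      # shape (1, 2)

expanded = normalize(repertoire * unconstrained)  # broadcasts to shape (2, 2)
print(expanded)  # [[0.125 0.125]
                 #  [0.375 0.375]]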
wmayner/pyphi
pyphi/subsystem.py
Subsystem.cause_info
def cause_info(self, mechanism, purview): """Return the cause information for a mechanism over a purview.""" return repertoire_distance( Direction.CAUSE, self.cause_repertoire(mechanism, purview), self.unconstrained_cause_repertoire(purview) )
python
def cause_info(self, mechanism, purview): """Return the cause information for a mechanism over a purview.""" return repertoire_distance( Direction.CAUSE, self.cause_repertoire(mechanism, purview), self.unconstrained_cause_repertoire(purview) )
[ "def", "cause_info", "(", "self", ",", "mechanism", ",", "purview", ")", ":", "return", "repertoire_distance", "(", "Direction", ".", "CAUSE", ",", "self", ".", "cause_repertoire", "(", "mechanism", ",", "purview", ")", ",", "self", ".", "unconstrained_cause_repertoire", "(", "purview", ")", ")" ]
Return the cause information for a mechanism over a purview.
[ "Return", "the", "cause", "information", "for", "a", "mechanism", "over", "a", "purview", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/subsystem.py#L484-L490
train
wmayner/pyphi
pyphi/subsystem.py
Subsystem.effect_info
def effect_info(self, mechanism, purview): """Return the effect information for a mechanism over a purview.""" return repertoire_distance( Direction.EFFECT, self.effect_repertoire(mechanism, purview), self.unconstrained_effect_repertoire(purview) )
python
def effect_info(self, mechanism, purview): """Return the effect information for a mechanism over a purview.""" return repertoire_distance( Direction.EFFECT, self.effect_repertoire(mechanism, purview), self.unconstrained_effect_repertoire(purview) )
[ "def", "effect_info", "(", "self", ",", "mechanism", ",", "purview", ")", ":", "return", "repertoire_distance", "(", "Direction", ".", "EFFECT", ",", "self", ".", "effect_repertoire", "(", "mechanism", ",", "purview", ")", ",", "self", ".", "unconstrained_effect_repertoire", "(", "purview", ")", ")" ]
Return the effect information for a mechanism over a purview.
[ "Return", "the", "effect", "information", "for", "a", "mechanism", "over", "a", "purview", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/subsystem.py#L492-L498
train
wmayner/pyphi
pyphi/subsystem.py
Subsystem.cause_effect_info
def cause_effect_info(self, mechanism, purview): """Return the cause-effect information for a mechanism over a purview. This is the minimum of the cause and effect information. """ return min(self.cause_info(mechanism, purview), self.effect_info(mechanism, purview))
python
def cause_effect_info(self, mechanism, purview): """Return the cause-effect information for a mechanism over a purview. This is the minimum of the cause and effect information. """ return min(self.cause_info(mechanism, purview), self.effect_info(mechanism, purview))
[ "def", "cause_effect_info", "(", "self", ",", "mechanism", ",", "purview", ")", ":", "return", "min", "(", "self", ".", "cause_info", "(", "mechanism", ",", "purview", ")", ",", "self", ".", "effect_info", "(", "mechanism", ",", "purview", ")", ")" ]
Return the cause-effect information for a mechanism over a purview. This is the minimum of the cause and effect information.
[ "Return", "the", "cause", "-", "effect", "information", "for", "a", "mechanism", "over", "a", "purview", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/subsystem.py#L500-L506
train
wmayner/pyphi
pyphi/subsystem.py
Subsystem.evaluate_partition
def evaluate_partition(self, direction, mechanism, purview, partition, repertoire=None): """Return the |small_phi| of a mechanism over a purview for the given partition. Args: direction (Direction): |CAUSE| or |EFFECT|. mechanism (tuple[int]): The nodes in the mechanism. purview (tuple[int]): The nodes in the purview. partition (Bipartition): The partition to evaluate. Keyword Args: repertoire (np.array): The unpartitioned repertoire. If not supplied, it will be computed. Returns: tuple[int, np.ndarray]: The distance between the unpartitioned and partitioned repertoires, and the partitioned repertoire. """ if repertoire is None: repertoire = self.repertoire(direction, mechanism, purview) partitioned_repertoire = self.partitioned_repertoire(direction, partition) phi = repertoire_distance( direction, repertoire, partitioned_repertoire) return (phi, partitioned_repertoire)
python
def evaluate_partition(self, direction, mechanism, purview, partition, repertoire=None): """Return the |small_phi| of a mechanism over a purview for the given partition. Args: direction (Direction): |CAUSE| or |EFFECT|. mechanism (tuple[int]): The nodes in the mechanism. purview (tuple[int]): The nodes in the purview. partition (Bipartition): The partition to evaluate. Keyword Args: repertoire (np.array): The unpartitioned repertoire. If not supplied, it will be computed. Returns: tuple[int, np.ndarray]: The distance between the unpartitioned and partitioned repertoires, and the partitioned repertoire. """ if repertoire is None: repertoire = self.repertoire(direction, mechanism, purview) partitioned_repertoire = self.partitioned_repertoire(direction, partition) phi = repertoire_distance( direction, repertoire, partitioned_repertoire) return (phi, partitioned_repertoire)
[ "def", "evaluate_partition", "(", "self", ",", "direction", ",", "mechanism", ",", "purview", ",", "partition", ",", "repertoire", "=", "None", ")", ":", "if", "repertoire", "is", "None", ":", "repertoire", "=", "self", ".", "repertoire", "(", "direction", ",", "mechanism", ",", "purview", ")", "partitioned_repertoire", "=", "self", ".", "partitioned_repertoire", "(", "direction", ",", "partition", ")", "phi", "=", "repertoire_distance", "(", "direction", ",", "repertoire", ",", "partitioned_repertoire", ")", "return", "(", "phi", ",", "partitioned_repertoire", ")" ]
Return the |small_phi| of a mechanism over a purview for the given partition. Args: direction (Direction): |CAUSE| or |EFFECT|. mechanism (tuple[int]): The nodes in the mechanism. purview (tuple[int]): The nodes in the purview. partition (Bipartition): The partition to evaluate. Keyword Args: repertoire (np.array): The unpartitioned repertoire. If not supplied, it will be computed. Returns: tuple[int, np.ndarray]: The distance between the unpartitioned and partitioned repertoires, and the partitioned repertoire.
[ "Return", "the", "|small_phi|", "of", "a", "mechanism", "over", "a", "purview", "for", "the", "given", "partition", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/subsystem.py#L511-L539
train
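A hedged sketch of scoring one candidate partition with evaluate_partition. The partition generator mip_partitions and the Direction enum are the ones used inside find_mip in the next record; the import paths and the specific mechanism/purview indices are assumptions.

import pyphi
from pyphi.direction import Direction
from pyphi.partition import mip_partitions  # candidate-partition generator (assumed path)

subsystem = pyphi.examples.basic_subsystem()  # assumed example subsystem
mechanism, purview = (0, 1), (2,)             # illustrative tuples (assumption)

# Take the first candidate partition and score it against the unpartitioned repertoire
partition = next(iter(mip_partitions(mechanism, purview, subsystem.node_labels)))
phi, partitioned_repertoire = subsystem.evaluate_partition(
    Direction.CAUSE, mechanism, purview, partition)
print(phi)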
wmayner/pyphi
pyphi/subsystem.py
Subsystem.find_mip
def find_mip(self, direction, mechanism, purview): """Return the minimum information partition for a mechanism over a purview. Args: direction (Direction): |CAUSE| or |EFFECT|. mechanism (tuple[int]): The nodes in the mechanism. purview (tuple[int]): The nodes in the purview. Returns: RepertoireIrreducibilityAnalysis: The irreducibility analysis for the minimum-information partition in one temporal direction. """ if not purview: return _null_ria(direction, mechanism, purview) # Calculate the unpartitioned repertoire to compare against the # partitioned ones. repertoire = self.repertoire(direction, mechanism, purview) def _mip(phi, partition, partitioned_repertoire): # Prototype of MIP with already known data # TODO: Use properties here to infer mechanism and purview from # partition yet access them with `.mechanism` and `.purview`. return RepertoireIrreducibilityAnalysis( phi=phi, direction=direction, mechanism=mechanism, purview=purview, partition=partition, repertoire=repertoire, partitioned_repertoire=partitioned_repertoire, node_labels=self.node_labels ) # State is unreachable - return 0 instead of giving nonsense results if (direction == Direction.CAUSE and np.all(repertoire == 0)): return _mip(0, None, None) mip = _null_ria(direction, mechanism, purview, phi=float('inf')) for partition in mip_partitions(mechanism, purview, self.node_labels): # Find the distance between the unpartitioned and partitioned # repertoire. phi, partitioned_repertoire = self.evaluate_partition( direction, mechanism, purview, partition, repertoire=repertoire) # Return immediately if mechanism is reducible. if phi == 0: return _mip(0.0, partition, partitioned_repertoire) # Update MIP if it's more minimal. if phi < mip.phi: mip = _mip(phi, partition, partitioned_repertoire) return mip
python
def find_mip(self, direction, mechanism, purview): """Return the minimum information partition for a mechanism over a purview. Args: direction (Direction): |CAUSE| or |EFFECT|. mechanism (tuple[int]): The nodes in the mechanism. purview (tuple[int]): The nodes in the purview. Returns: RepertoireIrreducibilityAnalysis: The irreducibility analysis for the minimum-information partition in one temporal direction. """ if not purview: return _null_ria(direction, mechanism, purview) # Calculate the unpartitioned repertoire to compare against the # partitioned ones. repertoire = self.repertoire(direction, mechanism, purview) def _mip(phi, partition, partitioned_repertoire): # Prototype of MIP with already known data # TODO: Use properties here to infer mechanism and purview from # partition yet access them with `.mechanism` and `.purview`. return RepertoireIrreducibilityAnalysis( phi=phi, direction=direction, mechanism=mechanism, purview=purview, partition=partition, repertoire=repertoire, partitioned_repertoire=partitioned_repertoire, node_labels=self.node_labels ) # State is unreachable - return 0 instead of giving nonsense results if (direction == Direction.CAUSE and np.all(repertoire == 0)): return _mip(0, None, None) mip = _null_ria(direction, mechanism, purview, phi=float('inf')) for partition in mip_partitions(mechanism, purview, self.node_labels): # Find the distance between the unpartitioned and partitioned # repertoire. phi, partitioned_repertoire = self.evaluate_partition( direction, mechanism, purview, partition, repertoire=repertoire) # Return immediately if mechanism is reducible. if phi == 0: return _mip(0.0, partition, partitioned_repertoire) # Update MIP if it's more minimal. if phi < mip.phi: mip = _mip(phi, partition, partitioned_repertoire) return mip
[ "def", "find_mip", "(", "self", ",", "direction", ",", "mechanism", ",", "purview", ")", ":", "if", "not", "purview", ":", "return", "_null_ria", "(", "direction", ",", "mechanism", ",", "purview", ")", "# Calculate the unpartitioned repertoire to compare against the", "# partitioned ones.", "repertoire", "=", "self", ".", "repertoire", "(", "direction", ",", "mechanism", ",", "purview", ")", "def", "_mip", "(", "phi", ",", "partition", ",", "partitioned_repertoire", ")", ":", "# Prototype of MIP with already known data", "# TODO: Use properties here to infer mechanism and purview from", "# partition yet access them with `.mechanism` and `.purview`.", "return", "RepertoireIrreducibilityAnalysis", "(", "phi", "=", "phi", ",", "direction", "=", "direction", ",", "mechanism", "=", "mechanism", ",", "purview", "=", "purview", ",", "partition", "=", "partition", ",", "repertoire", "=", "repertoire", ",", "partitioned_repertoire", "=", "partitioned_repertoire", ",", "node_labels", "=", "self", ".", "node_labels", ")", "# State is unreachable - return 0 instead of giving nonsense results", "if", "(", "direction", "==", "Direction", ".", "CAUSE", "and", "np", ".", "all", "(", "repertoire", "==", "0", ")", ")", ":", "return", "_mip", "(", "0", ",", "None", ",", "None", ")", "mip", "=", "_null_ria", "(", "direction", ",", "mechanism", ",", "purview", ",", "phi", "=", "float", "(", "'inf'", ")", ")", "for", "partition", "in", "mip_partitions", "(", "mechanism", ",", "purview", ",", "self", ".", "node_labels", ")", ":", "# Find the distance between the unpartitioned and partitioned", "# repertoire.", "phi", ",", "partitioned_repertoire", "=", "self", ".", "evaluate_partition", "(", "direction", ",", "mechanism", ",", "purview", ",", "partition", ",", "repertoire", "=", "repertoire", ")", "# Return immediately if mechanism is reducible.", "if", "phi", "==", "0", ":", "return", "_mip", "(", "0.0", ",", "partition", ",", "partitioned_repertoire", ")", "# Update MIP if it's more minimal.", "if", "phi", "<", "mip", ".", "phi", ":", "mip", "=", "_mip", "(", "phi", ",", "partition", ",", "partitioned_repertoire", ")", "return", "mip" ]
Return the minimum information partition for a mechanism over a purview. Args: direction (Direction): |CAUSE| or |EFFECT|. mechanism (tuple[int]): The nodes in the mechanism. purview (tuple[int]): The nodes in the purview. Returns: RepertoireIrreducibilityAnalysis: The irreducibility analysis for the minimum-information partition in one temporal direction.
[ "Return", "the", "minimum", "information", "partition", "for", "a", "mechanism", "over", "a", "purview", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/subsystem.py#L541-L598
train
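A brief sketch of calling find_mip directly and inspecting the returned RepertoireIrreducibilityAnalysis. The phi and partition attributes follow the constructor call in the record; the subsystem and index tuples are the same assumptions as in the earlier sketches.

import pyphi
from pyphi.direction import Direction

subsystem = pyphi.examples.basic_subsystem()  # assumed example subsystem
mechanism, purview = (0, 1), (2,)             # illustrative tuples (assumption)

ria = subsystem.find_mip(Direction.CAUSE, mechanism, purview)
print(ria.phi)        # 0 if the mechanism is reducible over this purview
print(ria.partition)  # the minimum-information partition that was found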
wmayner/pyphi
pyphi/subsystem.py
Subsystem.cause_mip
def cause_mip(self, mechanism, purview): """Return the irreducibility analysis for the cause MIP. Alias for |find_mip()| with ``direction`` set to |CAUSE|. """ return self.find_mip(Direction.CAUSE, mechanism, purview)
python
def cause_mip(self, mechanism, purview): """Return the irreducibility analysis for the cause MIP. Alias for |find_mip()| with ``direction`` set to |CAUSE|. """ return self.find_mip(Direction.CAUSE, mechanism, purview)
[ "def", "cause_mip", "(", "self", ",", "mechanism", ",", "purview", ")", ":", "return", "self", ".", "find_mip", "(", "Direction", ".", "CAUSE", ",", "mechanism", ",", "purview", ")" ]
Return the irreducibility analysis for the cause MIP. Alias for |find_mip()| with ``direction`` set to |CAUSE|.
[ "Return", "the", "irreducibility", "analysis", "for", "the", "cause", "MIP", "." ]
deeca69a084d782a6fde7bf26f59e93b593c5d77
https://github.com/wmayner/pyphi/blob/deeca69a084d782a6fde7bf26f59e93b593c5d77/pyphi/subsystem.py#L600-L605
train
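Since cause_mip is just find_mip with the direction fixed to CAUSE, the two calls in this sketch should agree on phi (same assumed setup as in the sketches above).

import pyphi
from pyphi.direction import Direction

subsystem = pyphi.examples.basic_subsystem()  # assumed example subsystem
mechanism, purview = (0, 1), (2,)             # illustrative tuples (assumption)

assert (subsystem.cause_mip(mechanism, purview).phi ==
        subsystem.find_mip(Direction.CAUSE, mechanism, purview).phi)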