code | docstring | func_name | language | repo | path | url | license |
---|---|---|---|---|---|---|---|
def washout(self, sequence):
""" Force the network to process the sequence instead of the
backprojection values. Used for adjusting the RNN's state. Returns the
outputs of the RNN that are needed for linear regression."""
assert len(sequence) != 0
assert self.outdim == len(sequence[0])
raw_outputs = []
for val in sequence:
backprojection = self._getLastOutput()
backprojection *= self.backprojectionFactor
self._activateNetwork(backprojection)
raw_out = self._getRawOutput()
raw_outputs.append(raw_out)
self._setLastOutput(val)
return array(raw_outputs)
|
Force the network to process the sequence instead of the
backprojection values. Used for adjusting the RNN's state. Returns the
outputs of the RNN that are needed for linear regression.
|
washout
|
python
|
pybrain/pybrain
|
pybrain/structure/modules/evolinonetwork.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/structure/modules/evolinonetwork.py
|
BSD-3-Clause
|
def _activateNetwork(self, input):
""" Run the activate method of the underlying network."""
assert len(input) == self._network.indim
output = array(self._network.activate(input))
self.offset = self._network.offset
return output
|
Run the activate method of the underlying network.
|
_activateNetwork
|
python
|
pybrain/pybrain
|
pybrain/structure/modules/evolinonetwork.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/structure/modules/evolinonetwork.py
|
BSD-3-Clause
|
def extrapolate(self, sequence, length):
""" Extrapolate 'sequence' for 'length' steps and return the
extrapolated sequence as array.
Extrapolating is realized by resetting the network, then washing it out
with the supplied sequence, and then generating a sequence."""
self.reset()
self.washout(sequence)
return self.generate(length)
|
Extrapolate 'sequence' for 'length' steps and return the
extrapolated sequence as array.
Extrapolating is realized by resetting the network, then washing it out
with the supplied sequence, and then generating a sequence.
|
extrapolate
|
python
|
pybrain/pybrain
|
pybrain/structure/modules/evolinonetwork.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/structure/modules/evolinonetwork.py
|
BSD-3-Clause
|
def generate(self, length):
""" Generate a sequence of specified length.
Use .reset() and .washout() before."""
generated_sequence = [] #empty(length)
for _ in range(length):
backprojection = self._getLastOutput()
backprojection *= self.backprojectionFactor
out = self._activateNetwork(backprojection)
generated_sequence.append(out)
return array(generated_sequence)
|
Generate a sequence of specified length.
Use .reset() and .washout() before.
|
generate
|
python
|
pybrain/pybrain
|
pybrain/structure/modules/evolinonetwork.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/structure/modules/evolinonetwork.py
|
BSD-3-Clause
|
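Taken together, washout(), generate() and extrapolate() implement the Evolino feedback loop: during washout the network is driven by the known sequence (the last output is clamped to each target) while its raw outputs are collected, and during generation the scaled last output is fed back in as the next input. A minimal self-contained numpy sketch of that mechanic, with a hypothetical step function standing in for _activateNetwork:
import numpy as np

def washout_then_generate(step, sequence, length, backprojection_factor=1.0):
    # step is a hypothetical stand-in for _activateNetwork: fed-back output -> next output
    last_output = np.zeros_like(sequence[0])
    raw_outputs = []
    for target in sequence:                      # washout: teacher forcing
        raw_outputs.append(step(last_output * backprojection_factor))
        last_output = target                     # clamp to the known value
    generated = []
    for _ in range(length):                      # generation: free-running
        last_output = step(last_output * backprojection_factor)
        generated.append(last_output)
    return np.array(raw_outputs), np.array(generated)

# e.g. with an identity step: washout_then_generate(lambda x: x, np.ones((5, 2)), 3)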
def _getLastOutput(self):
"""Return the current output of the linear output layer."""
if self.offset == 0:
return zeros(self.outdim)
else:
return self._out_layer.outputbuffer[self.offset - 1]
|
Return the current output of the linear output layer.
|
_getLastOutput
|
python
|
pybrain/pybrain
|
pybrain/structure/modules/evolinonetwork.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/structure/modules/evolinonetwork.py
|
BSD-3-Clause
|
def _validateGenomeLayer(self, layer):
"""Validate the type and state of a layer."""
assert isinstance(layer, LSTMLayer)
assert not layer.peepholes
|
Validate the type and state of a layer.
|
_validateGenomeLayer
|
python
|
pybrain/pybrain
|
pybrain/structure/modules/evolinonetwork.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/structure/modules/evolinonetwork.py
|
BSD-3-Clause
|
def _getGenomeOfLayer(self, layer):
"""Return the genome of a single layer."""
self._validateGenomeLayer(layer)
connections = self._getInputConnectionsOfLayer(layer)
layer_weights = []
# iterate cells of layer
for cell_idx in range(layer.outdim):
# todo: the evolino paper uses a different order of weights for the genotype of a lstm cell
cell_weights = []
# iterate weight types (ingate, forgetgate, cell and outgate)
for t in range(4):
# iterate connections
for c in connections:
# iterate sources of connection
for i in range(c.indim):
idx = i + cell_idx * c.indim + t * layer.outdim * c.indim
cell_weights.append(c.params[idx])
layer_weights.append(cell_weights)
return layer_weights
|
Return the genome of a single layer.
|
_getGenomeOfLayer
|
python
|
pybrain/pybrain
|
pybrain/structure/modules/evolinonetwork.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/structure/modules/evolinonetwork.py
|
BSD-3-Clause
|
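The index expression idx = i + cell_idx * c.indim + t * layer.outdim * c.indim assumes the connection's flat parameter vector is grouped first by gate type, then by cell, then by source unit. A small standalone sketch with hypothetical sizes makes that ordering explicit:
import numpy as np

outdim, indim = 2, 3                       # cells in the layer, sources of one connection
params = np.arange(4 * outdim * indim)     # one contiguous block of weights per gate type

for cell_idx in range(outdim):
    cell_weights = []
    for t in range(4):                     # ingate, forgetgate, cell, outgate
        for i in range(indim):
            idx = i + cell_idx * indim + t * outdim * indim
            cell_weights.append(params[idx])
    print(cell_idx, cell_weights)          # cell 0: [0, 1, 2, 6, 7, 8, 12, ...]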
def _setGenomeOfLayer(self, layer, weights):
"""Set the genome of a single layer."""
self._validateGenomeLayer(layer)
connections = self._getInputConnectionsOfLayer(layer)
# iterate cells of layer
for cell_idx in range(layer.outdim):
# todo: the evolino paper uses a different order of weights for the genotype of a lstm cell
cell_weights = weights[cell_idx]
# iterate weight types (ingate, forgetgate, cell and outgate)
for t in range(4):
# iterate connections
for c in connections:
# iterate sources of connection
for i in range(c.indim):
idx = i + cell_idx * c.indim + t * layer.outdim * c.indim
c.params[idx] = cell_weights.pop(0)
|
Set the genome of a single layer.
|
_setGenomeOfLayer
|
python
|
pybrain/pybrain
|
pybrain/structure/modules/evolinonetwork.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/structure/modules/evolinonetwork.py
|
BSD-3-Clause
|
def setOutputWeightMatrix(self, W):
"""Set the weight matrix of the linear output layer."""
c = self._hid_to_out_connection
c.params[:] = W.flatten()
|
Set the weight matrix of the linear output layer.
|
setOutputWeightMatrix
|
python
|
pybrain/pybrain
|
pybrain/structure/modules/evolinonetwork.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/structure/modules/evolinonetwork.py
|
BSD-3-Clause
|
def getOutputWeightMatrix(self):
"""Return the weight matrix of the linear output layer."""
c = self._hid_to_out_connection
p = c.params
return reshape(p, (c.outdim, c.indim))
|
Return the weight matrix of the linear output layer.
|
getOutputWeightMatrix
|
python
|
pybrain/pybrain
|
pybrain/structure/modules/evolinonetwork.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/structure/modules/evolinonetwork.py
|
BSD-3-Clause
|
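setOutputWeightMatrix() and getOutputWeightMatrix() are a flatten/reshape pair over the hidden-to-output connection's parameters. The round trip can be checked with plain numpy (illustrative shapes):
import numpy as np

outdim, indim = 3, 5
W = np.random.randn(outdim, indim)
params = W.flatten()                       # what setOutputWeightMatrix writes into c.params
W_back = params.reshape(outdim, indim)     # what getOutputWeightMatrix reconstructs
assert np.array_equal(W, W_back)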
def _getInputConnectionsOfLayer(self, layer):
"""Return a list of all input connections for the layer."""
connections = []
all_cons = list(self._network.recurrentConns)
all_cons += sum(list(self._network.connections.values()), [])
for c in all_cons:
if c.outmod is layer:
if not isinstance(c, FullConnection):
raise NotImplementedError(
"Only FullConnections are supported")
connections.append(c)
return connections
|
Return a list of all input connections for the layer.
|
_getInputConnectionsOfLayer
|
python
|
pybrain/pybrain
|
pybrain/structure/modules/evolinonetwork.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/structure/modules/evolinonetwork.py
|
BSD-3-Clause
|
def setSigma(self, sigma):
"""Wrapper method to set the sigmas (the parameters of the module) to a
certain value. """
assert len(sigma) == self.indim
self._params *= 0
self._params += sigma
|
Wrapper method to set the sigmas (the parameters of the module) to a
certain value.
|
setSigma
|
python
|
pybrain/pybrain
|
pybrain/structure/modules/gaussianlayer.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/structure/modules/gaussianlayer.py
|
BSD-3-Clause
|
def _forwardImplementation(self, inbuf, outbuf):
""" assigns one of the neurons to the input given in inbuf and writes
the neuron's coordinates to outbuf. """
# calculate the winner neuron with lowest error (square difference)
self.difference = self.neurons - tile(inbuf, (self.nNeurons, self.nNeurons, 1))
error = sum(self.difference ** 2, 2)
self.winner = array(minimum_position(error))
if not self.outputFullMap:
outbuf[:] = self.winner
|
assigns one of the neurons to the input given in inbuf and writes
the neuron's coordinates to outbuf.
|
_forwardImplementation
|
python
|
pybrain/pybrain
|
pybrain/structure/modules/kohonen.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/structure/modules/kohonen.py
|
BSD-3-Clause
|
def _backwardImplementation(self, outerr, inerr, outbuf, inbuf):
""" trains the kohonen map in unsupervised manner, moving the
closest neuron and its neighbours closer to the input pattern. """
# calculate neighbourhood and limit to edge of matrix
n = floor(self.neighbours)
self.neighbours *= self.neighbourdecay
tl = (self.winner - n)
br = (self.winner + n + 1)
tl[tl < 0] = 0
br[br > self.nNeurons + 1] = self.nNeurons + 1
# calculate distance matrix
tempm = 1 - sum(abs(self.distmatrix - self.winner.reshape(1, 1, 2)), 2) / self.nNeurons
tempm[tempm < 0] = 0
distm = zeros((self.nNeurons, self.nNeurons, self.nInput))
for i in range(self.nInput):
distm[:, :, i] = tempm
self.neurons[tl[0]:br[0], tl[1]:br[1]] -= self.learningrate * self.difference[tl[0]:br[0], tl[1]:br[1]] * distm[tl[0]:br[0], tl[1]:br[1]]
|
trains the kohonen map in an unsupervised manner, moving the
closest neuron and its neighbours closer to the input pattern.
|
_backwardImplementation
|
python
|
pybrain/pybrain
|
pybrain/structure/modules/kohonen.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/structure/modules/kohonen.py
|
BSD-3-Clause
|
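The two Kohonen methods above together perform one self-organising-map step: find the grid neuron closest to the input, then pull it and its shrinking neighbourhood towards the input. A compact standalone sketch of that step (square grid, simplified box neighbourhood rather than the exact pybrain distance kernel):
import numpy as np

def som_step(neurons, x, learningrate=0.1, radius=1):
    # neurons has shape (n, n, dim); x has shape (dim,)
    n = neurons.shape[0]
    diff = neurons - x
    winner = np.unravel_index(np.argmin((diff ** 2).sum(axis=2)), (n, n))
    rows = slice(max(winner[0] - radius, 0), min(winner[0] + radius + 1, n))
    cols = slice(max(winner[1] - radius, 0), min(winner[1] + radius + 1, n))
    neurons[rows, cols] -= learningrate * diff[rows, cols]   # move the patch towards x
    return winner

grid = np.random.rand(5, 5, 2)
som_step(grid, np.array([0.5, 0.5]))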
def __init__(self, dim, peepholes = False, name = None):
"""
:arg dim: number of cells
:key peepholes: enable peephole connections (from state to gates)? """
self.setArgs(dim = dim, peepholes = peepholes)
# Internal buffers, created dynamically:
self.bufferlist = [
('ingate', dim),
('outgate', dim),
('forgetgate', dim),
('ingatex', dim),
('outgatex', dim),
('forgetgatex', dim),
('state', dim),
('ingateError', dim),
('outgateError', dim),
('forgetgateError', dim),
('stateError', dim),
]
Module.__init__(self, 4*dim, dim, name)
if self.peepholes:
ParameterContainer.__init__(self, dim*3)
self._setParameters(self.params)
self._setDerivatives(self.derivs)
|
:arg dim: number of cells
:key peepholes: enable peephole connections (from state to gates)?
|
__init__
|
python
|
pybrain/pybrain
|
pybrain/structure/modules/lstm.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/structure/modules/lstm.py
|
BSD-3-Clause
|
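The constructor above fixes the size bookkeeping of an LSTM layer: the module expects four input slices per cell (matching the gate buffers declared in bufferlist) and, with peepholes enabled, owns three parameters per cell. A hypothetical dim just to spell that arithmetic out:
dim = 10                     # hypothetical number of cells
indim = 4 * dim              # ingate, forgetgate, cell input and outgate slices
outdim = dim                 # one output per cell
peephole_params = 3 * dim    # with peepholes: one state-to-gate weight per cell and gate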
def meatSlice(self):
"""Return a moduleslice that wraps the meat part of the layer."""
return ModuleSlice(self,
inSliceTo=self.dim * (3 + self.dimensions),
outSliceTo=self.dim)
|
Return a moduleslice that wraps the meat part of the layer.
|
meatSlice
|
python
|
pybrain/pybrain
|
pybrain/structure/modules/mdlstm.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/structure/modules/mdlstm.py
|
BSD-3-Clause
|
def stateSlice(self):
"""Return a moduleslice that wraps the state transfer part of the layer.
"""
return ModuleSlice(self,
inSliceFrom=self.dim * (3 + self.dimensions),
outSliceFrom=self.dim)
|
Return a moduleslice that wraps the state transfer part of the layer.
|
stateSlice
|
python
|
pybrain/pybrain
|
pybrain/structure/modules/mdlstm.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/structure/modules/mdlstm.py
|
BSD-3-Clause
|
def __init__(self, timedim, shape,
hiddendim, outsize, blockshape=None, name=None):
"""Initialize an MdrnnLayer.
The dimensionality of the sequence - for example 2 for a
picture or 3 for a video - is given by `timedim`, while the sidelengths
along each dimension are given by the tuple `shape`.
The layer will have `hiddendim` hidden units per swiping direction. The
number of swiping directions is given by 2**timedim, which corresponds
to one swipe from each corner to its opposing corner and back.
To indicate how many outputs per timestep are used, you have to specify
`outsize`.
In order to treat blocks of the input and not single voxels, you can
also specify `blockshape`. For example the layer will then feed (2, 2)
chunks into the network at each timestep which correspond to the (2, 2)
rectangles that the input can be split into.
"""
self.timedim = timedim
self.shape = shape
blockshape = tuple([1] * timedim) if blockshape is None else blockshape
self.blockshape = blockshape
self.hiddendim = hiddendim
self.outsize = outsize
self.indim = reduce(operator.mul, shape, 1)
self.blocksize = reduce(operator.mul, blockshape, 1)
self.sequenceLength = self.indim // self.blocksize
self.outdim = self.sequenceLength * self.outsize
self.bufferlist = [('cellStates', self.sequenceLength * self.hiddendim)]
Module.__init__(self, self.indim, self.outdim, name=name)
# Amount of parameters that are required for the input to the hidden
self.num_in_params = self.blocksize * self.hiddendim * (3 + self.timedim)
# Amount of parameters that are needed for the recurrent connections.
# There is one such set of parameters for every time dimension.
self.num_rec_params = outsize * hiddendim * (3 + self.timedim)
# Amount of parameters that are needed for the output.
self.num_out_params = outsize * hiddendim
# Amount of parameters that are needed from the bias to the hidden and
# the output
self.num_bias_params = (3 + self.timedim) * self.hiddendim + self.outsize
# Total list of parameters.
self.num_params = sum((self.num_in_params,
self.timedim * self.num_rec_params,
self.num_out_params,
self.num_bias_params))
ParameterContainer.__init__(self, self.num_params)
# Some layers for internal use.
self.hiddenlayer = MDLSTMLayer(self.hiddendim, self.timedim)
# Every point in the sequence has timedim predecessors.
self.predlayers = [LinearLayer(self.outsize) for _ in range(timedim)]
# We need a single layer to hold the input. We will swipe a connection
# over the correct parts of it, in order to feed the correct input in.
self.inlayer = LinearLayer(self.indim)
# Make some layers the same to save memory.
self.inlayer.inputbuffer = self.inlayer.outputbuffer = self.inputbuffer
# In order to allocate not too much memory, we just set the size of the
# layer to 1 and correct it afterwards.
self.outlayer = LinearLayer(self.outdim)
self.outlayer.inputbuffer = self.outlayer.outputbuffer = self.outputbuffer
self.bias = BiasUnit()
|
Initialize an MdrnnLayer.
The dimensionality of the sequence - for example 2 for a
picture or 3 for a video - is given by `timedim`, while the sidelengths
along each dimension are given by the tuple `shape`.
The layer will have `hiddendim` hidden units per swiping direction. The
number of swiping directions is given by 2**timedim, which corresponds
to one swipe from each corner to its opposing corner and back.
To indicate how many outputs per timestep are used, you have to specify
`outsize`.
In order to treat blocks of the input and not single voxels, you can
also specify `blockshape`. For example the layer will then feed (2, 2)
chunks into the network at each timestep which correspond to the (2, 2)
rectangles that the input can be split into.
|
__init__
|
python
|
pybrain/pybrain
|
pybrain/structure/modules/mdrnnlayer.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/structure/modules/mdrnnlayer.py
|
BSD-3-Clause
|
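For a concrete feel for the parameter bookkeeping in this constructor, here is the same arithmetic evaluated for a small hypothetical configuration (a 4x4 input swept in 2x2 blocks, 3 hidden units, 1 output per timestep):
from functools import reduce
import operator

timedim, shape, blockshape = 2, (4, 4), (2, 2)
hiddendim, outsize = 3, 1

indim = reduce(operator.mul, shape, 1)                    # 16 input voxels
blocksize = reduce(operator.mul, blockshape, 1)           # 4 voxels per block
sequenceLength = indim // blocksize                       # 4 timesteps
outdim = sequenceLength * outsize                         # 4 outputs in total

num_in_params = blocksize * hiddendim * (3 + timedim)     # 4 * 3 * 5 = 60
num_rec_params = outsize * hiddendim * (3 + timedim)      # 1 * 3 * 5 = 15 per time dimension
num_out_params = outsize * hiddendim                      # 3
num_bias_params = (3 + timedim) * hiddendim + outsize     # 5 * 3 + 1 = 16
num_params = num_in_params + timedim * num_rec_params + num_out_params + num_bias_params
print(num_params)                                         # 60 + 30 + 3 + 16 = 109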
def __init__(self, dim, name = None, mix=5):
"""Initialize mixture density layer - mix gives the number of Gaussians
to mix, dim is the dimension of the target(!) vector."""
nUnits = mix * (dim + 2) # mean vec + stddev and mixing coeff
NeuronLayer.__init__(self, nUnits, name)
self.nGaussians = mix
self.nDims = dim
|
Initialize mixture density layer - mix gives the number of Gaussians
to mix, dim is the dimension of the target(!) vector.
|
__init__
|
python
|
pybrain/pybrain
|
pybrain/structure/modules/mixturedensity.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/structure/modules/mixturedensity.py
|
BSD-3-Clause
|
def _forwardImplementation(self, inbuf, outbuf):
"""Calculate layer outputs (Gaussian parameters etc., not function
values!) from given activations """
K = self.nGaussians
# Mixing parameters and stddevs
outbuf[0:K*2] = safeExp(inbuf[0:K*2])
outbuf[0:K] /= sum(outbuf[0:K])
# Means
outbuf[K*2:] = inbuf[K*2:]
|
Calculate layer outputs (Gaussian parameters etc., not function
values!) from given activations
|
_forwardImplementation
|
python
|
pybrain/pybrain
|
pybrain/structure/modules/mixturedensity.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/structure/modules/mixturedensity.py
|
BSD-3-Clause
|
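The forward pass above converts raw activations into mixture parameters: the first K entries become normalised mixing coefficients, the next K become positive standard deviations, and the remaining K*dim entries pass through as means. The same transformation in plain numpy (illustrative sizes; np.exp stands in for pybrain's clipped safeExp):
import numpy as np

K, dim = 5, 2                              # number of Gaussians, target dimension
inbuf = np.random.randn(K * (dim + 2))     # raw activations of the layer

outbuf = np.empty_like(inbuf)
outbuf[:2 * K] = np.exp(inbuf[:2 * K])     # mixing coefficients and stddevs must be positive
outbuf[:K] /= outbuf[:K].sum()             # mixing coefficients sum to one
outbuf[2 * K:] = inbuf[2 * K:]             # means pass through unchanged

mix, stddev = outbuf[:K], outbuf[K:2 * K]
means = outbuf[2 * K:].reshape(K, dim)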
def _backwardImplementation(self, outerr, inerr, outbuf, inbuf):
"""Calculate the derivatives of output wrt. corresponding input
activations."""
# Cannot calculate because we would need the targets!
# ==> we just pass through the stuff from the trainer, who takes care
# of the rest
inerr[:] = outerr
|
Calculate the derivatives of output wrt. corresponding input
activations.
|
_backwardImplementation
|
python
|
pybrain/pybrain
|
pybrain/structure/modules/mixturedensity.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/structure/modules/mixturedensity.py
|
BSD-3-Clause
|
def __init__(self, indim, outdim, name=None, **args):
"""Create a Module with an input dimension of indim and an output
dimension of outdim."""
self.setArgs(name=name, **args)
# Make sure that it does not matter whether Module.__init__ is called
# before or after adding elements to bufferlist in subclasses.
# TODO: it should be possible to use less than these buffers. For some
# methods, an error is not completely necessary. (e.g. evolution)
self.bufferlist = [] if not self.bufferlist else self.bufferlist
self.bufferlist += [('inputbuffer', indim),
('inputerror', indim),
('outputbuffer', outdim),
('outputerror', outdim), ]
self.indim = indim
self.outdim = outdim
# Those buffers are 2D arrays (time, dim)
self._resetBuffers()
|
Create a Module with an input dimension of indim and an output
dimension of outdim.
|
__init__
|
python
|
pybrain/pybrain
|
pybrain/structure/modules/module.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/structure/modules/module.py
|
BSD-3-Clause
|
def _resetBuffers(self, length=1):
"""Reset buffers to a length (in time dimension) of 1."""
for buffername, dim in self.bufferlist:
setattr(self, buffername, zeros((length, dim)))
if length==1:
self.offset = 0
|
Reset buffers to a length (in time dimension) of 1.
|
_resetBuffers
|
python
|
pybrain/pybrain
|
pybrain/structure/modules/module.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/structure/modules/module.py
|
BSD-3-Clause
|
def _growBuffers(self):
"""Double the size of the modules buffers in its first dimension and
keep the current values."""
currentlength = getattr(self, self.bufferlist[0][0]).shape[0]
# Save the current buffers
tmp = [getattr(self, n) for n, _ in self.bufferlist]
Module._resetBuffers(self, currentlength * 2)
for previous, (buffername, _dim) in zip(tmp, self.bufferlist):
buffer_ = getattr(self, buffername)
buffer_[:currentlength] = previous
|
Double the size of the module's buffers in the first dimension and
keep the current values.
|
_growBuffers
|
python
|
pybrain/pybrain
|
pybrain/structure/modules/module.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/structure/modules/module.py
|
BSD-3-Clause
|
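_growBuffers() is the usual amortised-growth trick: allocate a buffer twice as long along the time axis and copy the existing history into its front. The idea in isolation:
import numpy as np

def grow(buffer_):
    length, dim = buffer_.shape
    bigger = np.zeros((length * 2, dim))   # twice the time dimension
    bigger[:length] = buffer_              # keep the current values
    return bigger

buf = np.arange(6.0).reshape(3, 2)
buf = grow(buf)                            # shape (6, 2), first three rows preserved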
def backward(self):
"""Produce the input error from the output error."""
self._backwardImplementation(self.outputerror[self.offset],
self.inputerror[self.offset],
self.outputbuffer[self.offset],
self.inputbuffer[self.offset])
|
Produce the input error from the output error.
|
backward
|
python
|
pybrain/pybrain
|
pybrain/structure/modules/module.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/structure/modules/module.py
|
BSD-3-Clause
|
def reset(self):
"""Set all buffers, past and present, to zero."""
self.offset = 0
for buffername, l in self.bufferlist:
buf = getattr(self, buffername)
buf[:] = zeros(l)
|
Set all buffers, past and present, to zero.
|
reset
|
python
|
pybrain/pybrain
|
pybrain/structure/modules/module.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/structure/modules/module.py
|
BSD-3-Clause
|
def shift(self, items):
"""Shift all buffers up or down a defined number of items on offset axis.
Negative values indicate backward shift."""
if items == 0:
return
self.offset += items
for buffername, _ in self.bufferlist:
buf = getattr(self, buffername)
assert abs(items) <= len(buf), "Cannot shift further than length of buffer."
fill = zeros((abs(items), len(buf[0])))
if items < 0:
buf[:] = append(buf[-items:], fill, 0)
else:
buf[:] = append(fill, buf[0:-items], 0)
|
Shift all buffers up or down a given number of items along the offset axis.
Negative values indicate backward shift.
|
shift
|
python
|
pybrain/pybrain
|
pybrain/structure/modules/module.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/structure/modules/module.py
|
BSD-3-Clause
|
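shift() slides every buffer along the time axis and zero-fills the vacated rows: a positive count pads zeros at the front and drops the newest rows, a negative count drops the oldest rows and pads zeros at the end. The same behaviour for a single numpy buffer:
import numpy as np

def shift_buffer(buf, items):
    if items == 0:
        return buf
    fill = np.zeros((abs(items), buf.shape[1]))
    if items < 0:
        return np.append(buf[-items:], fill, axis=0)   # drop first rows, pad at the end
    return np.append(fill, buf[:-items], axis=0)       # pad at the front, drop last rows

buf = np.arange(8.0).reshape(4, 2)
print(shift_buffer(buf, 1))
print(shift_buffer(buf, -1))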
def activateOnDataset(self, dataset):
"""Run the module's forward pass on the given dataset unconditionally
and return the output."""
dataset.reset()
self.reset()
out = zeros((len(dataset), self.outdim))
for i, sample in enumerate(dataset):
# FIXME: Can we always assume that sample[0] is the input data?
out[i, :] = self.activate(sample[0])
self.reset()
dataset.reset()
return out
|
Run the module's forward pass on the given dataset unconditionally
and return the output.
|
activateOnDataset
|
python
|
pybrain/pybrain
|
pybrain/structure/modules/module.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/structure/modules/module.py
|
BSD-3-Clause
|
def activate(self, inpt):
"""Do one transformation of an input and return the result."""
assert len(self.inputbuffer[self.offset]) == len(inpt), str((len(self.inputbuffer[self.offset]), len(inpt)))
self.inputbuffer[self.offset] = inpt
self.forward()
return self.outputbuffer[self.offset].copy()
|
Do one transformation of an input and return the result.
|
activate
|
python
|
pybrain/pybrain
|
pybrain/structure/modules/module.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/structure/modules/module.py
|
BSD-3-Clause
|
def backActivate(self, outerr):
"""Do one transformation of an output error outerr backward and return
the error on the input."""
self.outputerror[self.offset] = outerr
self.backward()
return self.inputerror[self.offset].copy()
|
Do one transformation of an output error outerr backward and return
the error on the input.
|
backActivate
|
python
|
pybrain/pybrain
|
pybrain/structure/modules/module.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/structure/modules/module.py
|
BSD-3-Clause
|
def _backwardImplementation(self, outerr, inerr, outbuf, inbuf):
"""Converse of the module's transformation function. Can be overwritten
in subclasses, does not have to.
Should also compute the derivatives of the parameters."""
|
Converse of the module's transformation function. Can be overridden
in subclasses, but does not have to be.
Should also compute the derivatives of the parameters.
|
_backwardImplementation
|
python
|
pybrain/pybrain
|
pybrain/structure/modules/module.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/structure/modules/module.py
|
BSD-3-Clause
|
def whichNeuron(self, inputIndex=None, outputIndex=None):
"""Determine which neuron a position in the input/output buffer
corresponds to. """
if inputIndex is not None:
return inputIndex
if outputIndex is not None:
return outputIndex
|
Determine which neuron a position in the input/output buffer
corresponds to.
|
whichNeuron
|
python
|
pybrain/pybrain
|
pybrain/structure/modules/neuronlayer.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/structure/modules/neuronlayer.py
|
BSD-3-Clause
|
def __init__(self, indim=0, outdim=0, model=None):
""" Initializes as empty module.
If `model` is given, initialize using this LIBSVM model instead. `indim`
and `outdim` are for compatibility only, and ignored."""
self.reset()
# set some dummy input/output dimensions - these become obsolete when
# the SVM is initialized
self.indim = 0
self.outdim = 0
self.setModel(model)
|
Initializes as empty module.
If `model` is given, initialize using this LIBSVM model instead. `indim`
and `outdim` are for compatibility only, and ignored.
|
__init__
|
python
|
pybrain/pybrain
|
pybrain/structure/modules/svmunit.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/structure/modules/svmunit.py
|
BSD-3-Clause
|
def forwardPass(self, values=False):
""" Produce the output from the current input vector, or process a
dataset.
If `values` is False or 'class', output is set to the number of the
predicted class. If True or 'raw', produces decision values instead.
These are stored in a dictionary for multi-class SVM. If `prob`, class
probabilities are produced. This works only if probability option was
set for SVM training."""
if values == "class" or values == False:
# predict the output class right away
self.output = self.model.predict(self.input)
elif values == 'raw' or values == True:
# return a dict of decision values for each one-on-one class
# combination (i,j)
self.output = self.model.predict_values(self.input)
else: # values == "prob"
# return probability (works only for multiclass!)
self.output = self.model.predict_probability(self.input)
|
Produce the output from the current input vector, or process a
dataset.
If `values` is False or 'class', output is set to the number of the
predicted class. If True or 'raw', produces decision values instead.
These are stored in a dictionary for multi-class SVM. If `prob`, class
probabilities are produced. This works only if probability option was
set for SVM training.
|
forwardPass
|
python
|
pybrain/pybrain
|
pybrain/structure/modules/svmunit.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/structure/modules/svmunit.py
|
BSD-3-Clause
|
def activateOnDataset(self, dataset, values=False):
""" Run the module's forward pass on the given dataset unconditionally
and return the output as a list.
:arg dataset: A non-sequential supervised data set.
:key values: Passed through to the forwardPass() method."""
out = []
inp = dataset['input']
for i in range(inp.shape[0]):
self.input = inp[i, :]
# carry out forward pass to get decision values for each class combo
self.forwardPass(values=values)
out.append(self.output)
return out
|
Run the module's forward pass on the given dataset unconditionally
and return the output as a list.
:arg dataset: A non-sequential supervised data set.
:key values: Passed through to the forwardPass() method.
|
activateOnDataset
|
python
|
pybrain/pybrain
|
pybrain/structure/modules/svmunit.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/structure/modules/svmunit.py
|
BSD-3-Clause
|
def __init__(self, numRows, numColumns, name=None):
""" initialize with the number of rows and columns. the table
values are all set to zero.
"""
Module.__init__(self, 2, 1, name)
ParameterContainer.__init__(self, numRows*numColumns)
self.numRows = numRows
self.numColumns = numColumns
|
initialize with the number of rows and columns. the table
values are all set to zero.
|
__init__
|
python
|
pybrain/pybrain
|
pybrain/structure/modules/table.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/structure/modules/table.py
|
BSD-3-Clause
|
def __init__(self, predefined = None, **kwargs):
""" For the current implementation, the sequence length
needs to be fixed, and given at construction time. """
if predefined is not None:
self.predefined = predefined
else:
self.predefined = {}
FeedForwardNetwork.__init__(self, **kwargs)
assert self.seqlen is not None
# the input is a 1D-mesh (as a view on a flat input layer)
inmod = LinearLayer(self.inputsize * self.seqlen, name='input')
inmesh = ModuleMesh.viewOnFlatLayer(inmod, (self.seqlen,), 'inmesh')
# the output is also a 1D-mesh
outmod = self.outcomponentclass(self.outputsize * self.seqlen, name='output')
outmesh = ModuleMesh.viewOnFlatLayer(outmod, (self.seqlen,), 'outmesh')
# the hidden layers are placed in a 2 x seqlen mesh
hiddenmesh = ModuleMesh.constructWithLayers(self.componentclass, self.hiddensize,
(2, self.seqlen), 'hidden')
# add the modules
for c in inmesh:
self.addInputModule(c)
for c in outmesh:
self.addOutputModule(c)
for c in hiddenmesh:
self.addModule(c)
# set the connections weights to be shared
inconnf = MotherConnection(inmesh.componentOutdim * hiddenmesh.componentIndim, name='inconn')
outconnf = MotherConnection(outmesh.componentIndim * hiddenmesh.componentOutdim, name='outconn')
forwardconn = MotherConnection(hiddenmesh.componentIndim * hiddenmesh.componentOutdim, name='fconn')
if self.symmetric:
backwardconn = forwardconn
inconnb = inconnf
outconnb = outconnf
else:
backwardconn = MotherConnection(hiddenmesh.componentIndim * hiddenmesh.componentOutdim, name='bconn')
inconnb = MotherConnection(inmesh.componentOutdim * hiddenmesh.componentIndim, name='inconn')
outconnb = MotherConnection(outmesh.componentIndim * hiddenmesh.componentOutdim, name='outconn')
# build the connections
for i in range(self.seqlen):
# input to hidden
self.addConnection(SharedFullConnection(inconnf, inmesh[(i,)], hiddenmesh[(0, i)]))
self.addConnection(SharedFullConnection(inconnb, inmesh[(i,)], hiddenmesh[(1, i)]))
# hidden to output
self.addConnection(SharedFullConnection(outconnf, hiddenmesh[(0, i)], outmesh[(i,)]))
self.addConnection(SharedFullConnection(outconnb, hiddenmesh[(1, i)], outmesh[(i,)]))
if i > 0:
# forward in time
self.addConnection(SharedFullConnection(forwardconn, hiddenmesh[(0, i - 1)], hiddenmesh[(0, i)]))
if i < self.seqlen - 1:
# backward in time
self.addConnection(SharedFullConnection(backwardconn, hiddenmesh[(1, i + 1)], hiddenmesh[(1, i)]))
self.sortModules()
|
For the current implementation, the sequence length
needs to be fixed, and given at construction time.
|
__init__
|
python
|
pybrain/pybrain
|
pybrain/structure/networks/bidirectional.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/structure/networks/bidirectional.py
|
BSD-3-Clause
|
def _canonicForm(self, tup, dim):
""" determine if there is a symmetrical tuple of lower coordinates
:key dim: the removed coordinate. """
if not self.symmetricdimensions:
return tup
canonic = []
for dim, maxval in enumerate(tupleRemoveItem(self.dims, dim)):
canonic.append(min(maxval-1-tup[dim], tup[dim]))
return tuple(canonic)
|
determine if there is a symmetrical tuple of lower coordinates
:key dim: the removed coordinate.
|
_canonicForm
|
python
|
pybrain/pybrain
|
pybrain/structure/networks/borderswiping.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/structure/networks/borderswiping.py
|
BSD-3-Clause
|
def _extrapolateBorderAt(self, t, using):
""" maybe we can use weights that are similar to neighboring borderconnections
as initialization. """
closest = reachable(decrementAny, [t], list(using.keys()))
if len(closest) > 0:
params = zeros(using[list(closest.keys())[0]].paramdim)
normalize = 0.
for c, dist in list(closest.items()):
params += using[c].params / dist
normalize += 1./dist
params /= normalize
return params
return None
|
maybe we can use weights that are similar to neighboring borderconnections
as initialization.
|
_extrapolateBorderAt
|
python
|
pybrain/pybrain
|
pybrain/structure/networks/borderswiping.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/structure/networks/borderswiping.py
|
BSD-3-Clause
|
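_extrapolateBorderAt() initialises a missing border connection with an inverse-distance weighted average of the parameter vectors of the closest existing ones. The averaging step on its own, with made-up data:
import numpy as np

neighbours = {1: np.array([1.0, 2.0]),    # distance -> parameter vector (made-up data)
              2: np.array([3.0, 4.0])}

params = np.zeros(2)
normalize = 0.0
for dist, p in neighbours.items():
    params += p / dist
    normalize += 1.0 / dist
params /= normalize
print(params)                              # [1.666..., 2.666...]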
def activate(self, inpt):
"""Do one transformation of an input and return the result."""
self.reset()
return super(FeedForwardNetworkComponent, self).activate(inpt)
|
Do one transformation of an input and return the result.
|
activate
|
python
|
pybrain/pybrain
|
pybrain/structure/networks/feedforward.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/structure/networks/feedforward.py
|
BSD-3-Clause
|
def _permsForSwiping(self):
"""Return the correct permutations of blocks for all swiping direction.
"""
# We generate the permutations from an identity permutation by
# slicing it correctly.
return [self._standardPermutation()]
|
Return the correct permutations of blocks for all swiping directions.
|
_permsForSwiping
|
python
|
pybrain/pybrain
|
pybrain/structure/networks/mdrnn.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/structure/networks/mdrnn.py
|
BSD-3-Clause
|
def _permsForSwiping(self):
"""Return the correct permutations of blocks for all swiping direction.
"""
# We generate the permutations from an identity permutation by
# slicing it correctly.
identity = scipy.array(list(range(self.sequenceLength)))
identity.shape = tuple(s // b for s, b in zip(self.shape, self.blockshape))
permutations = []
# Loop over all possible directions: from each corner to each corner
for direction in crossproduct([('+', '-')] * self.timedim):
axises = []
for _, axisdir in enumerate(direction):
# Use a normal complete slice for forward...
if axisdir == '+':
indices = slice(None, None, 1)
# ...and a reversed complete slice for backward
else:
indices = slice(None, None, -1)
axises.append(indices)
permutations.append(operator.getitem(identity, axises).flatten())
return permutations
|
Return the correct permutations of blocks for all swiping directions.
|
_permsForSwiping
|
python
|
pybrain/pybrain
|
pybrain/structure/networks/mdrnn.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/structure/networks/mdrnn.py
|
BSD-3-Clause
|
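The second _permsForSwiping() builds one block ordering per swipe direction by reversing the identity index grid along every combination of axes, giving 2**timedim orderings. A self-contained numpy version for a 2x2 block grid:
import itertools
import numpy as np

grid_shape = (2, 2)                        # blocks per dimension
identity = np.arange(np.prod(grid_shape)).reshape(grid_shape)

permutations = []
for direction in itertools.product(('+', '-'), repeat=len(grid_shape)):
    slices = tuple(slice(None, None, 1 if d == '+' else -1) for d in direction)
    permutations.append(identity[slices].flatten())

# Four orderings, one per corner-to-corner swipe:
# [0 1 2 3], [1 0 3 2], [2 3 0 1], [3 2 1 0]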
def __init__(self, dims, **args):
""" The one required argument specifies the sizes of each dimension (minimum 2) """
SwipingNetwork.__init__(self, dims = dims, **args)
pdims = product(dims)
# the input is a 2D-mesh (as a view on a flat input layer)
inmod = LinearLayer(self.insize*pdims, name = 'input')
inmesh = ModuleMesh.viewOnFlatLayer(inmod, dims, 'inmesh')
# the output is a 2D-mesh (as a view on a flat sigmoid output layer)
outmod = self.outcomponentclass(self.outputs*pdims, name = 'output')
outmesh = ModuleMesh.viewOnFlatLayer(outmod, dims, 'outmesh')
if self.componentclass is MDLSTMLayer:
c = lambda: MDLSTMLayer(self.hsize, 2, self.peepholes).meatSlice()
adims = tuple(list(dims)+[4])
hiddenmesh = ModuleMesh(c, adims, 'hidden', baserename = True)
else:
hiddenmesh = ModuleMesh.constructWithLayers(self.componentclass, self.hsize, tuple(list(dims)+[self.swipes]), 'hidden')
self._buildSwipingStructure(inmesh, hiddenmesh, outmesh)
# add the identity connections for the states
for m in self.modules:
if isinstance(m, MDLSTMLayer):
tmp = m.stateSlice()
index = 0
for c in list(self.connections[m]):
if isinstance(c.outmod, MDLSTMLayer):
self.addConnection(IdentityConnection(tmp, c.outmod.stateSlice(),
outSliceFrom = self.hsize*(index),
outSliceTo = self.hsize*(index+1)))
index += 1
self.sortModules()
|
The one required argument specifies the sizes of each dimension (minimum 2)
|
__init__
|
python
|
pybrain/pybrain
|
pybrain/structure/networks/multidimensional.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/structure/networks/multidimensional.py
|
BSD-3-Clause
|
def __getitem__(self, name):
"""Return the module with the given name."""
for m in self.modules:
if m.name == name:
return m
return None
|
Return the module with the given name.
|
__getitem__
|
python
|
pybrain/pybrain
|
pybrain/structure/networks/network.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/structure/networks/network.py
|
BSD-3-Clause
|
def _containerIterator(self):
"""Return an iterator over the non-empty ParameterContainers of the
network.
The order IS deterministic."""
for m in self.modulesSorted:
if m.paramdim:
yield m
for c in self.connections[m]:
if c.paramdim and not isinstance(c, SharedConnection):
yield c
for mc in self.motherconnections:
if mc.paramdim:
yield mc
|
Return an iterator over the non-empty ParameterContainers of the
network.
The order IS deterministic.
|
_containerIterator
|
python
|
pybrain/pybrain
|
pybrain/structure/networks/network.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/structure/networks/network.py
|
BSD-3-Clause
|
def addModule(self, m):
"""Add the given module to the network."""
if isinstance(m, ModuleSlice):
m = m.base
if m not in self.modules:
self.modules.add(m)
if not m in self.connections:
self.connections[m] = []
if m.paramdim > 0:
m.owner = self
if m.sequential and not self.sequential:
logging.warning(
("Module %s is sequential, and added to a FFN. Are you sure " +
"you know what you're doing?") % m)
self.sorted = False
|
Add the given module to the network.
|
addModule
|
python
|
pybrain/pybrain
|
pybrain/structure/networks/network.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/structure/networks/network.py
|
BSD-3-Clause
|
def addInputModule(self, m):
"""Add the given module to the network and mark it as an input module.
"""
if isinstance(m, ModuleSlice): m = m.base
if m not in self.inmodules:
self.inmodules.append(m)
self.addModule(m)
|
Add the given module to the network and mark it as an input module.
|
addInputModule
|
python
|
pybrain/pybrain
|
pybrain/structure/networks/network.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/structure/networks/network.py
|
BSD-3-Clause
|
def addOutputModule(self, m):
"""Add the given module to the network and mark it as an output module.
"""
if isinstance(m, ModuleSlice):
m = m.base
if m not in self.outmodules:
self.outmodules.append(m)
self.addModule(m)
|
Add the given module to the network and mark it as an output module.
|
addOutputModule
|
python
|
pybrain/pybrain
|
pybrain/structure/networks/network.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/structure/networks/network.py
|
BSD-3-Clause
|
def addConnection(self, c):
"""Add the given connection to the network."""
if not c.inmod in self.connections:
self.connections[c.inmod] = []
self.connections[c.inmod].append(c)
if isinstance(c, SharedConnection):
if c.mother not in self.motherconnections:
self.motherconnections.append(c.mother)
c.mother.owner = self
elif c.paramdim > 0:
c.owner = self
self.sorted = False
|
Add the given connection to the network.
|
addConnection
|
python
|
pybrain/pybrain
|
pybrain/structure/networks/network.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/structure/networks/network.py
|
BSD-3-Clause
|
def reset(self):
"""Reset all component modules and the network."""
Module.reset(self)
for m in self.modules:
m.reset()
|
Reset all component modules and the network.
|
reset
|
python
|
pybrain/pybrain
|
pybrain/structure/networks/network.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/structure/networks/network.py
|
BSD-3-Clause
|
def _setParameters(self, p, owner=None):
""" put slices of this array back into the modules """
ParameterContainer._setParameters(self, p, owner)
index = 0
for x in self._containerIterator():
x._setParameters(self.params[index:index + x.paramdim], self)
index += x.paramdim
|
put slices of this array back into the modules
|
_setParameters
|
python
|
pybrain/pybrain
|
pybrain/structure/networks/network.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/structure/networks/network.py
|
BSD-3-Clause
|
def _setDerivatives(self, d, owner=None):
""" put slices of this array back into the modules """
ParameterContainer._setDerivatives(self, d, owner)
index = 0
for x in self._containerIterator():
x._setDerivatives(self.derivs[index:index + x.paramdim], self)
index += x.paramdim
|
put slices of this array back into the modules
|
_setDerivatives
|
python
|
pybrain/pybrain
|
pybrain/structure/networks/network.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/structure/networks/network.py
|
BSD-3-Clause
|
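_setParameters() and _setDerivatives() hand every contained module a slice of one shared flat array, so module-level and network-level views alias the same memory. The pattern in miniature with numpy:
import numpy as np

paramdims = [4, 2, 3]                      # sizes of three hypothetical containers
flat = np.zeros(sum(paramdims))

views, index = [], 0
for dim in paramdims:
    views.append(flat[index:index + dim])  # a view into the shared array, not a copy
    index += dim

views[1][:] = [7.0, 8.0]                   # writing through a view...
print(flat)                                # ...is visible in the flat array: 0 0 0 0 7 8 0 0 0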
def _topologicalSort(self):
"""Update the network structure and make .modulesSorted a topologically
sorted list of the modules."""
# Algorithm: R. E. Tarjan (1972), stolen from:
# http://www.bitformation.com/art/python_toposort.html
# Create a directed graph, including a counter of incoming connections.
graph = {}
for node in self.modules:
if node not in graph:
# Zero incoming connections.
graph[node] = [0]
for c in chain(*list(self.connections.values())):
graph[c.inmod].append(c.outmod)
# Update the count of incoming arcs in outnode.
graph[c.outmod][0] += 1
# Find all roots (nodes with zero incoming arcs).
roots = [node for (node, nodeinfo) in list(graph.items()) if nodeinfo[0] == 0]
# Make sure the ordering on all runs is the same.
roots.sort(key=lambda x: x.name)
# Repeatedly emit a root and remove it from the graph. Removing
# a node may convert some of the node's direct children into roots.
# Whenever that happens, we append the new roots to the list of
# current roots.
self.modulesSorted = []
while len(roots) != 0:
root = roots[0]
roots = roots[1:]
self.modulesSorted.append(root)
for child in graph[root][1:]:
graph[child][0] -= 1
if graph[child][0] == 0:
roots.append(child)
del graph[root]
if graph:
raise NetworkConstructionException("Loop in network graph.")
|
Update the network structure and make .modulesSorted a topologically
sorted list of the modules.
|
_topologicalSort
|
python
|
pybrain/pybrain
|
pybrain/structure/networks/network.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/structure/networks/network.py
|
BSD-3-Clause
|
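The sort above is the standard in-degree (Kahn) topological sort restricted to the module graph, with roots ordered by name so repeated runs agree. The same algorithm on a plain adjacency dictionary, for reference:
def toposort(edges):
    # edges maps each node to the list of nodes it feeds into
    indegree = dict((node, 0) for node in edges)
    for targets in edges.values():
        for t in targets:
            indegree[t] += 1
    roots = sorted(node for node, deg in indegree.items() if deg == 0)
    order = []
    while roots:
        node = roots.pop(0)
        order.append(node)
        for t in edges[node]:
            indegree[t] -= 1
            if indegree[t] == 0:
                roots.append(t)
    if len(order) != len(edges):
        raise ValueError("Loop in graph.")
    return order

print(toposort({'in': ['hidden'], 'bias': ['hidden'], 'hidden': ['out'], 'out': []}))
# ['bias', 'in', 'hidden', 'out']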
def sortModules(self):
"""Prepare the network for activation by sorting the internal
datastructure.
Needs to be called before activation."""
if self.sorted:
return
# Sort the modules.
self._topologicalSort()
# Sort the connections by name.
for m in self.modules:
self.connections[m].sort(key=lambda x: x.name)
self.motherconnections.sort(key=lambda x: x.name)
# Create a single array with all parameters.
tmp = [pc.params for pc in self._containerIterator()]
total_size = sum(scipy.size(i) for i in tmp)
ParameterContainer.__init__(self, total_size)
if total_size > 0:
self.params[:] = scipy.concatenate(tmp)
self._setParameters(self.params)
# Create a single array with all derivatives.
tmp = [pc.derivs for pc in self._containerIterator()]
self.resetDerivatives()
self.derivs[:] = scipy.concatenate(tmp)
self._setDerivatives(self.derivs)
# TODO: make this a property; indim and outdim are invalid before
# .sortModules is called!
# Determine the input and output dimensions of the network.
self.indim = int(sum(m.indim for m in self.inmodules))
self.outdim = int(sum(m.outdim for m in self.outmodules))
# Initialize the network buffers.
self.bufferlist = []
Module.__init__(self, self.indim, self.outdim, name=self.name)
self.sorted = True
|
Prepare the network for activation by sorting the internal
datastructure.
Needs to be called before activation.
|
sortModules
|
python
|
pybrain/pybrain
|
pybrain/structure/networks/network.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/structure/networks/network.py
|
BSD-3-Clause
|
def convertToFastNetwork(self):
""" Attempt to transform the network into a fast network. If fast networks are not available,
or the network cannot be converted, it returns None. """
from pybrain.structure.networks import FeedForwardNetwork, RecurrentNetwork
try:
from arac.pybrainbridge import _RecurrentNetwork, _FeedForwardNetwork #@UnresolvedImport
except ImportError:
print("No fast networks available.")
return None
net = self.copy()
if isinstance(net, FeedForwardNetwork):
cnet = _FeedForwardNetwork()
elif isinstance(net, RecurrentNetwork):
cnet = _RecurrentNetwork()
for m in net.inmodules:
cnet.addInputModule(m)
for m in net.outmodules:
cnet.addOutputModule(m)
for m in net.modules:
cnet.addModule(m)
for clist in list(net.connections.values()):
for c in clist:
cnet.addConnection(c)
if isinstance(net, RecurrentNetwork):
for c in net.recurrentConns:
cnet.addRecurrentConnection(c)
try:
cnet.sortModules()
except ValueError:
print("Network cannot be converted.")
return None
cnet.owner = cnet
return cnet
|
Attempt to transform the network into a fast network. If fast networks are not available,
or the network cannot be converted, it returns None.
|
convertToFastNetwork
|
python
|
pybrain/pybrain
|
pybrain/structure/networks/network.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/structure/networks/network.py
|
BSD-3-Clause
|
def _constructParameterInfo(self):
""" construct a dictionnary with information about each parameter:
The key is the index in self.params, and the value is a tuple containing
(inneuron, outneuron), where a neuron is a tuple of it's module and an index.
"""
self.paramInfo = {}
index = 0
for x in self._containerIterator():
if isinstance(x, FullConnection):
for w in range(x.paramdim):
inbuf, outbuf = x.whichBuffers(w)
self.paramInfo[index + w] = ((x.inmod, x.inmod.whichNeuron(outputIndex=inbuf)),
(x.outmod, x.outmod.whichNeuron(inputIndex=outbuf)))
elif isinstance(x, NeuronLayer):
for n in range(x.paramdim):
self.paramInfo[index + n] = ((x, n), (x, n))
else:
raise NotImplementedError("unexpected parameter container type")
index += x.paramdim
|
construct a dictionary with information about each parameter:
The key is the index in self.params, and the value is a tuple containing
(inneuron, outneuron), where a neuron is a tuple of its module and an index.
|
_constructParameterInfo
|
python
|
pybrain/pybrain
|
pybrain/structure/networks/neurondecomposable.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/structure/networks/neurondecomposable.py
|
BSD-3-Clause
|
def getDecomposition(self):
""" return a list of arrays, each corresponding to one neuron's relevant parameters """
res = []
for neuron in self._neuronIterator():
nIndices = self.decompositionIndices[neuron]
if len(nIndices) > 0:
tmp = zeros(len(nIndices))
for i, ni in enumerate(nIndices):
tmp[i] = self.params[ni]
res.append(tmp)
return res
|
return a list of arrays, each corresponding to one neuron's relevant parameters
|
getDecomposition
|
python
|
pybrain/pybrain
|
pybrain/structure/networks/neurondecomposable.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/structure/networks/neurondecomposable.py
|
BSD-3-Clause
|
def setDecomposition(self, decomposedParams):
""" set parameters by neuron decomposition,
each corresponding to one neuron's relevant parameters """
nindex = 0
for neuron in self._neuronIterator():
nIndices = self.decompositionIndices[neuron]
if len(nIndices) > 0:
for i, ni in enumerate(nIndices):
self.params[ni] = decomposedParams[nindex][i]
nindex += 1
|
set parameters by neuron decomposition,
each corresponding to one neuron's relevant parameters
|
setDecomposition
|
python
|
pybrain/pybrain
|
pybrain/structure/networks/neurondecomposable.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/structure/networks/neurondecomposable.py
|
BSD-3-Clause
|
def convertNormalNetwork(n):
""" convert a normal network into a decomposable one """
if isinstance(n, RecurrentNetwork):
res = RecurrentDecomposableNetwork()
for c in n.recurrentConns:
res.addRecurrentConnection(c)
else:
res = FeedForwardDecomposableNetwork()
for m in n.inmodules:
res.addInputModule(m)
for m in n.outmodules:
res.addOutputModule(m)
for m in n.modules:
res.addModule(m)
for c in chain(*list(n.connections.values())):
res.addConnection(c)
res.name = n.name
res.sortModules()
return res
|
convert a normal network into a decomposable one
|
convertNormalNetwork
|
python
|
pybrain/pybrain
|
pybrain/structure/networks/neurondecomposable.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/structure/networks/neurondecomposable.py
|
BSD-3-Clause
|
def fromDims(cls, visibledim, hiddendim, params=None, biasParams=None):
"""Return a restricted Boltzmann machine of the given dimensions with the
given distributions."""
net = FeedForwardNetwork()
bias = BiasUnit('bias')
visible = LinearLayer(visibledim, 'visible')
hidden = SigmoidLayer(hiddendim, 'hidden')
con1 = FullConnection(visible, hidden)
con2 = FullConnection(bias, hidden)
if params is not None:
con1.params[:] = params
if biasParams is not None:
con2.params[:] = biasParams
net.addInputModule(visible)
net.addModule(bias)
net.addOutputModule(hidden)
net.addConnection(con1)
net.addConnection(con2)
net.sortModules()
return cls(net)
|
Return a restricted Boltzmann machine of the given dimensions with the
given distributions.
|
fromDims
|
python
|
pybrain/pybrain
|
pybrain/structure/networks/rbm.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/structure/networks/rbm.py
|
BSD-3-Clause
|
def addRecurrentConnection(self, c):
"""Add a connection to the network and mark it as a recurrent one."""
if isinstance(c, SharedConnection):
if c.mother not in self.motherconnections:
self.motherconnections.append(c.mother)
c.mother.owner = self
elif c.paramdim > 0:
c.owner = self
self.recurrentConns.append(c)
self.sorted = False
|
Add a connection to the network and mark it as a recurrent one.
|
addRecurrentConnection
|
python
|
pybrain/pybrain
|
pybrain/structure/networks/recurrent.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/structure/networks/recurrent.py
|
BSD-3-Clause
|
def activate(self, inpt):
"""Do one transformation of an input and return the result."""
self.inputbuffer[self.offset] = inpt
self.forward()
if self.forget:
return self.outputbuffer[self.offset].copy()
else:
return self.outputbuffer[self.offset - 1].copy()
|
Do one transformation of an input and return the result.
|
activate
|
python
|
pybrain/pybrain
|
pybrain/structure/networks/recurrent.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/structure/networks/recurrent.py
|
BSD-3-Clause
|
def backActivate(self, outerr):
"""Do one transformation of an output error outerr backward and return
the error on the input."""
self.outputerror[self.offset - 1] = outerr
self.backward()
return self.inputerror[self.offset].copy()
|
Do one transformation of an output error outerr backward and return
the error on the input.
|
backActivate
|
python
|
pybrain/pybrain
|
pybrain/structure/networks/recurrent.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/structure/networks/recurrent.py
|
BSD-3-Clause
|
def forward(self):
"""Produce the output from the input."""
if not (self.offset + 1 < self.inputbuffer.shape[0]):
self._growBuffers()
super(RecurrentNetworkComponent, self).forward()
self.offset += 1
self.maxoffset = max(self.offset, self.maxoffset)
|
Produce the output from the input.
|
forward
|
python
|
pybrain/pybrain
|
pybrain/structure/networks/recurrent.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/structure/networks/recurrent.py
|
BSD-3-Clause
|
def _verifyDimensions(self, inmesh, hiddenmesh, outmesh):
""" verify dimension matching between the meshes """
assert self.dims == inmesh.dims
assert outmesh.dims == self.dims
assert tuple(hiddenmesh.dims[:-1]) == self.dims, '%s <-> %s' % (
hiddenmesh.dims[:-1], self.dims)
assert hiddenmesh.dims[-1] == self.swipes
assert min(self.dims) > 1
|
verify dimension matching between the meshes
|
_verifyDimensions
|
python
|
pybrain/pybrain
|
pybrain/structure/networks/swiping.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/structure/networks/swiping.py
|
BSD-3-Clause
|
def _buildSwipingStructure(self, inmesh, hiddenmesh, outmesh):
"""
:key inmesh: a mesh of input units
:key hiddenmesh: a mesh of hidden units
:key outmesh: a mesh of output units
"""
self._verifyDimensions(inmesh, hiddenmesh, outmesh)
# add the modules
for c in inmesh:
self.addInputModule(c)
for c in outmesh:
self.addOutputModule(c)
for c in hiddenmesh:
self.addModule(c)
# create the motherconnections if they are not provided
if 'inconn' not in self.predefined:
self.predefined['inconn'] = MotherConnection(inmesh.componentOutdim * hiddenmesh.componentIndim, name='inconn')
if 'outconn' not in self.predefined:
self.predefined['outconn'] = MotherConnection(outmesh.componentIndim * hiddenmesh.componentOutdim, name='outconn')
if 'hconns' not in self.predefined:
self.predefined['hconns'] = {}
for s in range(len(self.dims)):
if self.symmetricdirections:
if s > 0 and self.symmetricdimensions:
self.predefined['hconns'][s] = self.predefined['hconns'][0]
else:
self.predefined['hconns'][s] = MotherConnection(hiddenmesh.componentIndim *
hiddenmesh.componentOutdim, name='hconn' + str(s))
else:
for dir in ['-', '+']:
if s > 0 and self.symmetricdimensions:
self.predefined['hconns'][(s, dir)] = self.predefined['hconns'][(0, dir)]
else:
self.predefined['hconns'][(s, dir)] = MotherConnection(hiddenmesh.componentIndim *
hiddenmesh.componentOutdim, name='hconn' + str(s) + dir)
# establish the connections
for unit in self._iterateOverUnits():
for swipe in range(self.swipes):
hunit = tuple(list(unit) + [swipe])
self.addConnection(SharedFullConnection(self.predefined['inconn'], inmesh[unit], hiddenmesh[hunit]))
self.addConnection(SharedFullConnection(self.predefined['outconn'], hiddenmesh[hunit], outmesh[unit]))
# one swiping connection along every dimension
for dim, maxval in enumerate(self.dims):
# determine where the swipe is coming from in this direction:
# swipe directions are towards higher coordinates on dim D if (swipe // 2**D) % 2 == 0
# and towards lower coordinates otherwise.
previousunit = list(hunit)
if (swipe // 2 ** dim) % 2 == 0:
previousunit[dim] -= 1
dir = '+'
else:
previousunit[dim] += 1
dir = '-'
if self.symmetricdirections:
hconn = self.predefined['hconns'][dim]
else:
hconn = self.predefined['hconns'][(dim, dir)]
previousunit = tuple(previousunit)
if previousunit[dim] >= 0 and previousunit[dim] < maxval:
self.addConnection(SharedFullConnection(hconn, hiddenmesh[previousunit], hiddenmesh[hunit]))
|
:key inmesh: a mesh of input units
:key hiddenmesh: a mesh of hidden units
:key outmesh: a mesh of output units
|
_buildSwipingStructure
|
python
|
pybrain/pybrain
|
pybrain/structure/networks/swiping.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/structure/networks/swiping.py
|
BSD-3-Clause
|
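The direction test (swipe // 2 ** dim) % 2 == 0 used above treats the swipe index as a binary number whose dim-th bit picks forward or backward travel along that dimension. Enumerating it for two dimensions shows the pattern:
dims = 2
for swipe in range(2 ** dims):
    direction = ['+' if (swipe // 2 ** dim) % 2 == 0 else '-' for dim in range(dims)]
    print(swipe, direction)
# 0 ['+', '+']   1 ['-', '+']   2 ['+', '-']   3 ['-', '-']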
def _printPredefined(self, dic=None, indent=0):
""" print the weights of the Motherconnections in the self.predefined dictionary (recursively)"""
if dic is None:
dic = self.predefined
for k, val in sorted(dic.items()):
print((' ' * indent, k,))
if isinstance(val, dict):
print(':')
self._printPredefined(val, indent + 2)
elif isinstance(val, MotherConnection):
print((val.params))
else:
print(val)
|
print the weights of the Motherconnections in the self.predefined dictionary (recursively)
|
_printPredefined
|
python
|
pybrain/pybrain
|
pybrain/structure/networks/swiping.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/structure/networks/swiping.py
|
BSD-3-Clause
|
def __init__(self, **args):
"""
:key clusterssize: the side of the square for clustering: if > 1, an extra layer for cluster-construction is added
:key clusteroverlap: by how much should the cluster overlap (default = 0)
:key directlink: should connections from the input directly to the bottleneck be included?
"""
if 'size' in args:
self.size = args['size']
args['dims'] = (self.size, self.size)
assert self.size > 1, 'Minimal board size is 2.'
BorderSwipingNetwork.__init__(self, **args)
if not self.rebuilt:
self._buildCaptureNetwork()
self.sortModules()
self.rebuilt = True
self.setArgs(rebuilt = True)
|
:key clusterssize: the side of the square for clustering: if > 1, an extra layer for cluster-construction is added
:key clusteroverlap: by how much should the cluster overlap (default = 0)
:key directlink: should connections from the input directly to the bottleneck be included?
|
__init__
|
python
|
pybrain/pybrain
|
pybrain/structure/networks/custom/capturegame.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/structure/networks/custom/capturegame.py
|
BSD-3-Clause
|
def _generateName(self):
""" generate a quasi unique name, using construction parameters """
name = self.__class__.__name__
#if self.size != 5:
name += '-s'+str(self.size)
name += '-h'+str(self.hsize)
if self.directlink:
name += '-direct'
if self.componentclass != TanhLayer:
name += '-'+self.componentclass.__name__
if self.outputs > 1:
name += '-o'+str(self.outputs)
if self.combinputs > 0:
name += '-combin'+str(self.combinputs)
#if self.bnecksize != 1:
# name += '-bn'+str(self.bnecksize)
#if self.combbnecksize > 0:
# name += '-combbn'+str(self.combbnecksize)
#if self.clusterssize != 1:
# name += '-cluster'+str(self.clusterssize)+'ov'+str(self.clusteroverlap)
# add a 6-digit random number, for distinction:
name += '--'+str(int(random.random()*9e5+1e5))
# TODO: use hash of the weights.
return name
|
generate a quasi unique name, using construction parameters
|
_generateName
|
python
|
pybrain/pybrain
|
pybrain/structure/networks/custom/capturegame.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/structure/networks/custom/capturegame.py
|
BSD-3-Clause
|
def resizedTo(self, newsize):
""" Produce a copy of the network, with a different size but with the same (shared) weights,
extrapolating on the borders as necessary. """
if newsize == self.size:
return self.copy()
else:
import copy
# TODO: ugly hack!
# remove recurrent references
for mc in self.motherconnections:
mc.owner = None
# copy the connections from the self.predefined dictionary:
cdict = copy.deepcopy(self.predefined)
args = self.argdict.copy()
args['size'] = newsize
del args['rebuilt']
# put the references back in
for mc in self.motherconnections:
mc.owner = self
return CaptureGameNetwork(predefined = cdict, **args)
|
Produce a copy of the network, with a different size but with the same (shared) weights,
extrapolating on the borders as necessary.
|
resizedTo
|
python
|
pybrain/pybrain
|
pybrain/structure/networks/custom/capturegame.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/structure/networks/custom/capturegame.py
|
BSD-3-Clause
|
def __init__(self, evolino_network, dataset, **kwargs):
""" :key evolino_network: an instance of NetworkWrapper()
:key dataset: The evaluation dataset
:key evalfunc: Compares output to target values and returns a scalar, denoting the fitness.
Defaults to -mse(output, target).
:key wtRatio: Float array of two values denoting the ratio between washout and training length.
Defaults to [1,2]
:key verbosity: Verbosity level. Defaults to 0
"""
Filter.__init__(self)
ap = KWArgsProcessor(self, kwargs)
ap.add('verbosity', default=0)
ap.add('evalfunc', default=lambda output, target:-Validator.MSE(output, target))
ap.add('wtRatio', default=array([1, 2], float))
self.network = evolino_network
self.dataset = dataset
self.max_fitness = -Infinity
|
:key evolino_network: an instance of NetworkWrapper()
:key dataset: The evaluation dataset
:key evalfunc: Compares output to target values and returns a scalar, denoting the fitness.
Defaults to -mse(output, target).
:key wtRatio: Float array of two values denoting the ratio between washout and training length.
Defaults to [1,2]
:key verbosity: Verbosity level. Defaults to 0
|
__init__
|
python
|
pybrain/pybrain
|
pybrain/supervised/evolino/filter.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/supervised/evolino/filter.py
|
BSD-3-Clause
|
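A brief worked illustration of the wtRatio split used by _evaluateNet below. Only the [1, 2] default and the washout/training slicing come from the source; normalizing the ratio to a washout fraction of 1/3 is an assumption made for this sketch.

# Hypothetical illustration: split one sequence according to wtRatio = [1, 2].
from numpy import array

wtRatio = array([1, 2], float)
washout_fraction = wtRatio[0] / wtRatio.sum()        # 1/3 (assumed normalization)

sequence = list(range(300))                           # stand-in for one dataset sequence
training_start = int(washout_fraction * len(sequence))
washout_part = sequence[:training_start]              # first 100 steps: only adjust the RNN state
training_part = sequence[training_start:]             # last 200 steps: fit the linear output layer
print(len(washout_part), len(training_part))          # 100 200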
def _evaluateNet(self, net, dataset, wtRatio):
""" Evaluates the performance of net on the given dataset.
Returns the fitness value.
:key net: Instance of EvolinoNetwork to evaluate
:key dataset: Sequences to test the net on
:key wtRatio: See __init__
"""
# === extract sequences from dataset ===
numSequences = dataset.getNumSequences()
washout_sequences = []
training_sequences = []
for i in range(numSequences):
sequence = dataset.getSequence(i)[1]
training_start = int(wtRatio * len(sequence))
washout_sequences.append(sequence[ : training_start ])
training_sequences.append(sequence[ training_start : ])
# === collect raw output (denoted by phi) ===
phis = []
for i in range(numSequences):
net.reset()
net.washout(washout_sequences[i])
phi = net.washout(training_sequences[i])
phis.append(phi)
# === calculate and set weights of linear output layer ===
PHI = concatenate(phis).T
PHI_INV = pinv2(PHI)
TARGET = concatenate(training_sequences).T
W = dot(TARGET, PHI_INV)
net.setOutputWeightMatrix(W)
# === collect outputs by applying the newly configured network ===
outputs = []
for i in range(numSequences):
out = net.extrapolate(washout_sequences[i], len(training_sequences[i]))
outputs.append(out)
# === calculate fitness value ===
OUTPUT = concatenate(outputs)
TARGET = concatenate(training_sequences)
fitness = self.evalfunc(OUTPUT, TARGET)
return fitness
|
Evaluates the performance of net on the given dataset.
Returns the fitness value.
:key net: Instance of EvolinoNetwork to evaluate
:key dataset: Sequences to test the net on
:key wtRatio: See __init__
|
_evaluateNet
|
python
|
pybrain/pybrain
|
pybrain/supervised/evolino/filter.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/supervised/evolino/filter.py
|
BSD-3-Clause
|
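The heart of _evaluateNet above is the regression that fits the linear output layer onto the raw LSTM outputs. A minimal sketch of that step with synthetic data, using numpy.linalg.pinv in place of the pinv2 call in the source:

import numpy as np

rng = np.random.default_rng(0)
phi = rng.standard_normal((50, 4))       # 50 time steps of 4 raw hidden outputs (one sequence)
true_W = rng.standard_normal((2, 4))     # hypothetical output weights for 2 output units
target = phi @ true_W.T                  # synthetic, noise-free targets

PHI = phi.T                              # stack raw outputs column-wise, as in the source
TARGET = target.T
W = TARGET @ np.linalg.pinv(PHI)         # least-squares solution for the output weights
assert np.allclose(W, true_W)            # with noise-free data the generating weights are recovered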
def apply(self, population):
""" Evaluate each individual, and store fitness inside population.
Also calculate and set the weight matrix W of the linear output layer.
:arg population: Instance of EvolinoPopulation
"""
net = self.network
dataset = self.dataset
population.clearFitness()
best_W = None
best_fitness = -Infinity
# iterate all individuals. Note, that these individuals are created on the fly
for individual in population.getIndividuals():
# load the individual's genome into the weights of the net
net.setGenome(individual.getGenome())
fitness = self._evaluateNet(net, dataset, self.wtRatio)
if self.verbosity > 1:
print(("Calculated fitness for individual", id(individual), " is ", fitness))
# set the individual fitness
population.setIndividualFitness(individual, fitness)
if best_fitness < fitness:
best_fitness = fitness
best_genome = deepcopy(individual.getGenome())
best_W = deepcopy(net.getOutputWeightMatrix())
net.reset()
net.setGenome(best_genome)
net.setOutputWeightMatrix(best_W)
# store fitness maximum to use it for triggering burst mutation
self.max_fitness = best_fitness
|
Evaluate each individual, and store fitness inside population.
Also calculate and set the weight matrix W of the linear output layer.
:arg population: Instance of EvolinoPopulation
|
apply
|
python
|
pybrain/pybrain
|
pybrain/supervised/evolino/filter.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/supervised/evolino/filter.py
|
BSD-3-Clause
|
def apply(self, population):
""" The subpopulations of the EvolinoPopulation are iterated and forwarded
to the EvolinoSubSelection() operator.
:arg population: object of type EvolinoPopulation
"""
self.sub_selection.nParents = self.nParents
for sp in population.getSubPopulations():
self.sub_selection.apply(sp)
|
The subpopulations of the EvolinoPopulation are iterated and forwarded
to the EvolinoSubSelection() operator.
:arg population: object of type EvolinoPopulation
|
apply
|
python
|
pybrain/pybrain
|
pybrain/supervised/evolino/filter.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/supervised/evolino/filter.py
|
BSD-3-Clause
|
def __init__(self, **kwargs):
""" :key **kwargs: will be forwarded to the EvolinoSubReproduction constructor
"""
Filter.__init__(self)
self._kwargs = kwargs
|
:key **kwargs: will be forwarded to the EvolinoSubReproduction constructor
|
__init__
|
python
|
pybrain/pybrain
|
pybrain/supervised/evolino/filter.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/supervised/evolino/filter.py
|
BSD-3-Clause
|
def apply(self, population):
""" The subpopulations of the EvolinoPopulation are iterated and forwarded
to the EvolinoSubReproduction() operator.
:arg population: object of type EvolinoPopulation
"""
sps = population.getSubPopulations()
reproduction = EvolinoSubReproduction(**self._kwargs)
for sp in sps:
reproduction.apply(sp)
|
The subpopulations of the EvolinoPopulation are iterated and forwarded
to the EvolinoSubReproduction() operator.
:arg population: object of type EvolinoPopulation
|
apply
|
python
|
pybrain/pybrain
|
pybrain/supervised/evolino/filter.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/supervised/evolino/filter.py
|
BSD-3-Clause
|
def __init__(self, **kwargs):
""" :key **kwargs: will be forwarded to the EvolinoSubReproduction constructor
"""
Filter.__init__(self)
self._kwargs = kwargs
|
:key **kwargs: will be forwarded to the EvolinoSubReproduction constructor
|
__init__
|
python
|
pybrain/pybrain
|
pybrain/supervised/evolino/filter.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/supervised/evolino/filter.py
|
BSD-3-Clause
|
def apply(self, population):
""" Keeps just the best fitting individual of each subpopulation.
All other individuals are erased. After that, the kept best fitting
individuals will be used for reproduction, in order to refill the
sub-populations.
"""
sps = population.getSubPopulations()
for sp in sps:
n_toremove = sp.getIndividualsN() - 1
sp.removeWorstIndividuals(n_toremove)
reproduction = EvolinoSubReproduction(**self._kwargs)
reproduction.apply(sp)
|
Keeps only the best-fitting individual of each subpopulation;
all other individuals are erased. The kept individuals are then
used for reproduction, in order to refill the sub-populations.
|
apply
|
python
|
pybrain/pybrain
|
pybrain/supervised/evolino/filter.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/supervised/evolino/filter.py
|
BSD-3-Clause
|
def apply(self, population):
""" Simply removes some individuals with lowest fitness values
"""
n = population.getIndividualsN()
if self.nParents is None:
nKeep = n // 4
else:
nKeep = self.nParents
assert nKeep >= 0
assert nKeep <= n
population.removeWorstIndividuals(n - nKeep)
|
Simply removes the individuals with the lowest fitness values.
|
apply
|
python
|
pybrain/pybrain
|
pybrain/supervised/evolino/filter.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/supervised/evolino/filter.py
|
BSD-3-Clause
|
def __init__(self, **kwargs):
""" :key verbosity: Verbosity level
:key mutationVariate: Variate used for mutation. Defaults to None
:key mutation: Defaults to EvolinoSubMutation
"""
Filter.__init__(self)
ap = KWArgsProcessor(self, kwargs)
ap.add('verbosity', default=0)
ap.add('mutationVariate', default=None)
ap.add('mutation', default=EvolinoSubMutation())
if self.mutationVariate is not None:
self.mutation.mutationVariate = self.mutationVariate
|
:key verbosity: Verbosity level
:key mutationVariate: Variate used for mutation. Defaults to None
:key mutation: Defaults to EvolinoSubMutation
|
__init__
|
python
|
pybrain/pybrain
|
pybrain/supervised/evolino/filter.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/supervised/evolino/filter.py
|
BSD-3-Clause
|
def apply(self, population):
""" First determines the number of individuals to be created.
Then clones the fittest individuals (=parents), mutates these clones
and adds them to the population.
"""
max_n = population.getMaxNIndividuals()
n = population.getIndividualsN()
freespace = max_n - n
best = population.getBestIndividualsSorted(freespace)
children = set()
while True:
if len(children) >= freespace: break
for parent in best:
children.add(parent.copy())
if len(children) >= freespace: break
dummy_population = SimplePopulation()
dummy_population.addIndividuals(children)
self.mutation.apply(dummy_population)
population.addIndividuals(dummy_population.getIndividuals())
assert population.getMaxNIndividuals() == population.getIndividualsN()
|
First determines the number of individuals to be created.
Then clones the fittest individuals (=parents), mutates these clones
and adds them to the population.
|
apply
|
python
|
pybrain/pybrain
|
pybrain/supervised/evolino/filter.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/supervised/evolino/filter.py
|
BSD-3-Clause
|
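A small sketch of the refill loop in the reproduction step above: the fittest parents are cycled over and cloned until the subpopulation's free space is filled (the mutation pass is omitted and the names are placeholders):

freespace = 5
best = ['p1', 'p2', 'p3']                # stand-ins for the best individuals, sorted by fitness

children = []
while len(children) < freespace:
    for parent in best:
        children.append(parent + '-clone')
        if len(children) >= freespace:
            break

print(children)   # ['p1-clone', 'p2-clone', 'p3-clone', 'p1-clone', 'p2-clone']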
def __init__(self):
""" :key kwargs: See setArgs() method documentation
"""
SimpleGenomeManipulation.__init__(self)
self.mutationVariate = GaussianVariate()
self.mutationVariate.alpha = 0.1
self.verbosity = 0
|
Initializes the mutation with a GaussianVariate (alpha = 0.1) as the default mutation variate.
|
__init__
|
python
|
pybrain/pybrain
|
pybrain/supervised/evolino/gfilter.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/supervised/evolino/gfilter.py
|
BSD-3-Clause
|
def _manipulateValue(self, value):
""" Implementation of the abstract method of class SimpleGenomeManipulation
Set's the x0 value of the variate to value and takes a new sample
value and returns it.
"""
self.mutationVariate.x0 = value
newval = self.mutationVariate.getSample()
# print("MUTATED: ", value, "--->", newval)
return newval
|
Implementation of the abstract method of class SimpleGenomeManipulation.
Sets the variate's x0 to value, draws a new sample, and returns it.
|
_manipulateValue
|
python
|
pybrain/pybrain
|
pybrain/supervised/evolino/gfilter.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/supervised/evolino/gfilter.py
|
BSD-3-Clause
|
def getSortedIndividualList(self):
""" Returns a sorted list of all individuals with descending fitness values. """
fitness = self._fitness
return sorted(iter(fitness.keys()), key=lambda k:-fitness[k])
|
Returns a list of all individuals, sorted by descending fitness value.
|
getSortedIndividualList
|
python
|
pybrain/pybrain
|
pybrain/supervised/evolino/gpopulation.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/supervised/evolino/gpopulation.py
|
BSD-3-Clause
|
def getGenome(self):
""" Returns the genome created by concatenating the chromosomes supplied
by the sub-individuals.
"""
genome = []
for sub_individual in self._sub_individuals:
genome.append(deepcopy(sub_individual.getGenome()))
return genome
|
Returns the genome created by concatenating the chromosomes supplied
by the sub-individuals.
|
getGenome
|
python
|
pybrain/pybrain
|
pybrain/supervised/evolino/individual.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/supervised/evolino/individual.py
|
BSD-3-Clause
|
def __init__(self, genome):
""" :key genome: Any kind of nested iteratable container containing
floats as leafs
"""
self.setGenome(genome)
self.id = EvolinoSubIndividual._next_id
EvolinoSubIndividual._next_id += 1
|
:key genome: Any kind of nested iterable container containing
floats as leaves
|
__init__
|
python
|
pybrain/pybrain
|
pybrain/supervised/evolino/individual.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/supervised/evolino/individual.py
|
BSD-3-Clause
|
def _validateGenomeLayer(self, layer):
""" Validates the type and state of a layer
"""
assert isinstance(layer, LSTMLayer)
assert not layer.peepholes
|
Validates the type and state of a layer
|
_validateGenomeLayer
|
python
|
pybrain/pybrain
|
pybrain/supervised/evolino/networkwrapper.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/supervised/evolino/networkwrapper.py
|
BSD-3-Clause
|
def _getGenomeOfLayer(self, layer):
""" Returns the genome of a single layer.
"""
self._validateGenomeLayer(layer)
dim = layer.outdim
layer_weights = []
connections = self._getInputConnectionsOfLayer(layer)
for cell_idx in range(dim):
            # TODO: the Evolino paper uses a different order of weights for the genotype of an LSTM cell
cell_weights = []
for c in connections:
cell_weights += [
c.params[ cell_idx + 0 * dim ],
c.params[ cell_idx + 1 * dim ],
c.params[ cell_idx + 2 * dim ],
c.params[ cell_idx + 3 * dim ] ]
layer_weights.append(cell_weights)
return layer_weights
|
Returns the genome of a single layer.
|
_getGenomeOfLayer
|
python
|
pybrain/pybrain
|
pybrain/supervised/evolino/networkwrapper.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/supervised/evolino/networkwrapper.py
|
BSD-3-Clause
|
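A hedged sketch of the per-cell grouping performed by _getGenomeOfLayer above: each input connection of an LSTM layer with dim cells stores its parameters as four blocks of length dim, and the genome collects, per cell, one weight from each block. Mapping the blocks to the input, forget, cell-input and output gates is an assumption about pybrain's LSTMLayer layout.

import numpy as np

dim = 3                                  # number of LSTM cells in the layer
params = np.arange(4 * dim)              # flat parameter vector of one input connection

layer_genome = []
for cell_idx in range(dim):
    cell_weights = [int(params[cell_idx + k * dim]) for k in range(4)]
    layer_genome.append(cell_weights)

print(layer_genome)   # [[0, 3, 6, 9], [1, 4, 7, 10], [2, 5, 8, 11]]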
def _setGenomeOfLayer(self, layer, weights):
""" Sets the genome of a single layer.
"""
self._validateGenomeLayer(layer)
dim = layer.outdim
connections = self._getInputConnectionsOfLayer(layer)
for cell_idx in range(dim):
cell_weights = weights.pop(0)
for c in connections:
params = c.params
params[cell_idx + 0 * dim] = cell_weights.pop(0)
params[cell_idx + 1 * dim] = cell_weights.pop(0)
params[cell_idx + 2 * dim] = cell_weights.pop(0)
params[cell_idx + 3 * dim] = cell_weights.pop(0)
assert not len(cell_weights)
|
Sets the genome of a single layer.
|
_setGenomeOfLayer
|
python
|
pybrain/pybrain
|
pybrain/supervised/evolino/networkwrapper.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/supervised/evolino/networkwrapper.py
|
BSD-3-Clause
|
def setOutputWeightMatrix(self, W):
""" Sets the weight matrix of the output layer's input connection.
"""
c = self._hid_to_out_connection
c.params[:] = W.flatten()
|
Sets the weight matrix of the output layer's input connection.
|
setOutputWeightMatrix
|
python
|
pybrain/pybrain
|
pybrain/supervised/evolino/networkwrapper.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/supervised/evolino/networkwrapper.py
|
BSD-3-Clause
|
def getOutputWeightMatrix(self):
""" Sets the weight matrix of the output layer's input connection.
"""
c = self._hid_to_out_connection
p = c.getParameters()
return reshape(p, (c.outdim, c.indim))
|
Returns the weight matrix of the output layer's input connection.
|
getOutputWeightMatrix
|
python
|
pybrain/pybrain
|
pybrain/supervised/evolino/networkwrapper.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/supervised/evolino/networkwrapper.py
|
BSD-3-Clause
|
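The setOutputWeightMatrix and getOutputWeightMatrix methods above rely on a row-major flatten/reshape convention for the output weight matrix; a minimal sketch of that round trip (the dimensions are made up for illustration):

import numpy as np

outdim, indim = 2, 3
W = np.arange(outdim * indim, dtype=float).reshape(outdim, indim)

params = np.empty(outdim * indim)
params[:] = W.flatten()                   # what setOutputWeightMatrix writes into the connection
W_back = params.reshape((outdim, indim))  # what getOutputWeightMatrix reads back
assert np.array_equal(W, W_back)          # row-major order makes the round trip exact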
def _getInputConnectionsOfLayer(self, layer):
""" Returns a list of all input connections for the layer. """
connections = []
for c in sum(list(self._network.connections.values()), []):
if c.outmod is layer:
if not isinstance(c, FullConnection):
raise NotImplementedError("At the time there is only support for FullConnection")
connections.append(c)
return connections
|
Returns a list of all input connections for the layer.
|
_getInputConnectionsOfLayer
|
python
|
pybrain/pybrain
|
pybrain/supervised/evolino/networkwrapper.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/supervised/evolino/networkwrapper.py
|
BSD-3-Clause
|
def __init__(self, network):
""" :key network: The network to be wrapped
"""
self.network = network
self._output_connection = None
self._last_hidden_layer = None
self._first_hidden_layer = None
self._establishRecurrence()
|
:key network: The network to be wrapped
|
__init__
|
python
|
pybrain/pybrain
|
pybrain/supervised/evolino/networkwrapper.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/supervised/evolino/networkwrapper.py
|
BSD-3-Clause
|
def _establishRecurrence(self):
""" Adds a recurrent full connection from the output layer to the first
hidden layer.
"""
network = self.network
outlayer = self.getOutputLayer()
hid1layer = self.getFirstHiddenLayer()
network.addRecurrentConnection(FullConnection(outlayer, hid1layer))
|
Adds a recurrent full connection from the output layer to the first
hidden layer.
|
_establishRecurrence
|
python
|
pybrain/pybrain
|
pybrain/supervised/evolino/networkwrapper.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/supervised/evolino/networkwrapper.py
|
BSD-3-Clause
|
def _validateGenomeLayer(self, layer):
""" Validates the type and state of a layer
"""
assert isinstance(layer, LSTMLayer)
assert not layer.peepholes
|
Validates the type and state of a layer
|
_validateGenomeLayer
|
python
|
pybrain/pybrain
|
pybrain/supervised/evolino/networkwrapper.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/supervised/evolino/networkwrapper.py
|
BSD-3-Clause
|
def getGenome(self):
""" Returns the Genome of the network.
See class description for more details.
"""
weights = []
for layer in self.getHiddenLayers():
if isinstance(layer, LSTMLayer):
# if layer is not self._recurrence_layer:
weights += self._getGenomeOfLayer(layer)
return weights
|
Returns the Genome of the network.
See class description for more details.
|
getGenome
|
python
|
pybrain/pybrain
|
pybrain/supervised/evolino/networkwrapper.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/supervised/evolino/networkwrapper.py
|
BSD-3-Clause
|
def setGenome(self, weights):
""" Sets the Genome of the network.
See class description for more details.
"""
weights = deepcopy(weights)
for layer in self.getHiddenLayers():
if isinstance(layer, LSTMLayer):
# if layer is not self._recurrence_layer:
self._setGenomeOfLayer(layer, weights)
|
Sets the Genome of the network.
See class description for more details.
|
setGenome
|
python
|
pybrain/pybrain
|
pybrain/supervised/evolino/networkwrapper.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/supervised/evolino/networkwrapper.py
|
BSD-3-Clause
|
def _getGenomeOfLayer(self, layer):
""" Returns the genome of a single layer.
"""
self._validateGenomeLayer(layer)
dim = layer.outdim
layer_weights = []
connections = self._getInputConnectionsOfLayer(layer)
for cell_idx in range(dim):
            # TODO: the Evolino paper uses a different order of weights for the genotype of an LSTM cell
cell_weights = []
for c in connections:
cell_weights += [
c.getParameters()[ cell_idx + 0 * dim ],
c.getParameters()[ cell_idx + 1 * dim ],
c.getParameters()[ cell_idx + 2 * dim ],
c.getParameters()[ cell_idx + 3 * dim ] ]
layer_weights.append(cell_weights)
return layer_weights
|
Returns the genome of a single layer.
|
_getGenomeOfLayer
|
python
|
pybrain/pybrain
|
pybrain/supervised/evolino/networkwrapper.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/supervised/evolino/networkwrapper.py
|
BSD-3-Clause
|
def _setGenomeOfLayer(self, layer, weights):
""" Sets the genome of a single layer.
"""
self._validateGenomeLayer(layer)
dim = layer.outdim
connections = self._getInputConnectionsOfLayer(layer)
for cell_idx in range(dim):
cell_weights = weights.pop(0)
for c in connections:
params = c.getParameters()
params[cell_idx + 0 * dim] = cell_weights.pop(0)
params[cell_idx + 1 * dim] = cell_weights.pop(0)
params[cell_idx + 2 * dim] = cell_weights.pop(0)
params[cell_idx + 3 * dim] = cell_weights.pop(0)
assert not len(cell_weights)
|
Sets the genome of a single layer.
|
_setGenomeOfLayer
|
python
|
pybrain/pybrain
|
pybrain/supervised/evolino/networkwrapper.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/supervised/evolino/networkwrapper.py
|
BSD-3-Clause
|
def setOutputWeightMatrix(self, W):
""" Sets the weight matrix of the output layer's input connection.
"""
c = self.getOutputConnection()
p = c.getParameters()
p[:] = W.flatten()
|
Sets the weight matrix of the output layer's input connection.
|
setOutputWeightMatrix
|
python
|
pybrain/pybrain
|
pybrain/supervised/evolino/networkwrapper.py
|
https://github.com/pybrain/pybrain/blob/master/pybrain/supervised/evolino/networkwrapper.py
|
BSD-3-Clause
|