Dataset columns (column name: type, value statistics):
repository_name: string, lengths 5 to 67
func_path_in_repository: string, lengths 4 to 234
func_name: string, lengths 0 to 314
whole_func_string: string, lengths 52 to 3.87M
language: string, 6 distinct classes
func_code_string: string, lengths 52 to 3.87M
func_documentation_string: string, lengths 1 to 47.2k
func_code_url: string, lengths 85 to 339
Calysto/calysto
calysto/ai/conx.py
Network.getLayerIndex
def getLayerIndex(self, layer): """ Given a reference to a layer, returns the index of that layer in self.layers. """ for i in range(len(self.layers)): if layer == self.layers[i]: # shallow cmp return i return -1 # not in list
python
def getLayerIndex(self, layer): """ Given a reference to a layer, returns the index of that layer in self.layers. """ for i in range(len(self.layers)): if layer == self.layers[i]: # shallow cmp return i return -1 # not in list
Given a reference to a layer, returns the index of that layer in self.layers.
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L911-L919
Calysto/calysto
calysto/ai/conx.py
Network.add
def add(self, layer, verbosity = 0, position = None): """ Adds a layer. Layer verbosity is optional (default 0). """ layer._verbosity = verbosity layer._maxRandom = self._maxRandom layer.minTarget = 0.0 layer.maxTarget = 1.0 layer.minActivation = 0.0 layer.maxActivation = 1.0 if position == None: self.layers.append(layer) else: self.layers.insert(position, layer) self.layersByName[layer.name] = layer
python
def add(self, layer, verbosity = 0, position = None): """ Adds a layer. Layer verbosity is optional (default 0). """ layer._verbosity = verbosity layer._maxRandom = self._maxRandom layer.minTarget = 0.0 layer.maxTarget = 1.0 layer.minActivation = 0.0 layer.maxActivation = 1.0 if position == None: self.layers.append(layer) else: self.layers.insert(position, layer) self.layersByName[layer.name] = layer
Adds a layer. Layer verbosity is optional (default 0).
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L927-L941
Calysto/calysto
calysto/ai/conx.py
Network.isConnected
def isConnected(self, fromName, toName): """ Are these two layers connected this way? """ for c in self.connections: if (c.fromLayer.name == fromName and c.toLayer.name == toName): return 1 return 0
python
def isConnected(self, fromName, toName): """ Are these two layers connected this way? """ for c in self.connections: if (c.fromLayer.name == fromName and c.toLayer.name == toName): return 1 return 0
Are these two layers connected this way?
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L942-L948
Calysto/calysto
calysto/ai/conx.py
Network.connect
def connect(self, *names): """ Connects a list of names, one to the next. """ fromName, toName, rest = names[0], names[1], names[2:] self.connectAt(fromName, toName) if len(rest) != 0: self.connect(toName, *rest)
python
def connect(self, *names): """ Connects a list of names, one to the next. """ fromName, toName, rest = names[0], names[1], names[2:] self.connectAt(fromName, toName) if len(rest) != 0: self.connect(toName, *rest)
Connects a list of names, one to the next.
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L949-L956
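A minimal usage sketch for connect(): it chains connectAt() over successive layer names. The import path and the addLayer(name, size) call are assumptions inferred from the rows in this dump (calysto/ai/conx.py and the addLayers() source), not confirmed API documentation.

from calysto.ai.conx import Network  # assumed import path

net = Network()                      # prints "Conx using seed: ..."
net.addLayer("input", 2)             # addLayer(name, size), as called inside addLayers()
net.addLayer("hidden", 3)
net.addLayer("output", 1)
net.connect("input", "hidden", "output")   # wires input->hidden, then hidden->output
print(net.isConnected("input", "hidden"), net.isConnected("hidden", "output"))   # 1 1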
Calysto/calysto
calysto/ai/conx.py
Network.connectAt
def connectAt(self, fromName, toName, position = None): """ Connects two layers by instantiating an instance of Connection class. Allows a position number, indicating the ordering of the connection. """ fromLayer = self.getLayer(fromName) toLayer = self.getLayer(toName) if self.getLayerIndex(fromLayer) >= self.getLayerIndex(toLayer): raise NetworkError('Layers out of order.', (fromLayer.name, toLayer.name)) if (fromLayer.type == 'Output'): fromLayer.type = 'Hidden' fromLayer.patternReport = 0 # automatically turned off for hidden layers if fromLayer.kind == 'Output': fromLayer.kind = 'Hidden' elif (fromLayer.type == 'Undefined'): fromLayer.type = 'Input' fromLayer.patternReport = 0 # automatically turned off for input layers if fromLayer.kind == 'Undefined': fromLayer.kind = 'Input' if (toLayer.type == 'Input'): raise NetworkError('Connections out of order', (fromLayer.name, toLayer.name)) elif (toLayer.type == 'Undefined'): toLayer.type = 'Output' toLayer.patternReport = 1 # automatically turned on for output layers if toLayer.kind == 'Undefined': toLayer.kind = 'Output' if position == None: self.connections.append(Connection(fromLayer, toLayer)) else: self.connections.insert(position, Connection(fromLayer, toLayer))
python
def connectAt(self, fromName, toName, position = None): """ Connects two layers by instantiating an instance of Connection class. Allows a position number, indicating the ordering of the connection. """ fromLayer = self.getLayer(fromName) toLayer = self.getLayer(toName) if self.getLayerIndex(fromLayer) >= self.getLayerIndex(toLayer): raise NetworkError('Layers out of order.', (fromLayer.name, toLayer.name)) if (fromLayer.type == 'Output'): fromLayer.type = 'Hidden' fromLayer.patternReport = 0 # automatically turned off for hidden layers if fromLayer.kind == 'Output': fromLayer.kind = 'Hidden' elif (fromLayer.type == 'Undefined'): fromLayer.type = 'Input' fromLayer.patternReport = 0 # automatically turned off for input layers if fromLayer.kind == 'Undefined': fromLayer.kind = 'Input' if (toLayer.type == 'Input'): raise NetworkError('Connections out of order', (fromLayer.name, toLayer.name)) elif (toLayer.type == 'Undefined'): toLayer.type = 'Output' toLayer.patternReport = 1 # automatically turned on for output layers if toLayer.kind == 'Undefined': toLayer.kind = 'Output' if position == None: self.connections.append(Connection(fromLayer, toLayer)) else: self.connections.insert(position, Connection(fromLayer, toLayer))
Connects two layers by instantiating an instance of Connection class. Allows a position number, indicating the ordering of the connection.
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L957-L987
Calysto/calysto
calysto/ai/conx.py
Network.addLayers
def addLayers(self, *arg, **kw): """ Creates an N layer network with 'input', 'hidden1', 'hidden2',... and 'output' layers. Keyword type indicates "parallel" or "serial". If only one hidden layer, it is called "hidden". """ netType = "serial" if "type" in kw: netType = kw["type"] self.addLayer('input', arg[0]) hiddens = [] if len(arg) > 3: hcount = 0 for hidc in arg[1:-1]: name = 'hidden%d' % hcount self.addLayer(name, hidc) hiddens.append(name) hcount += 1 elif len(arg) == 3: name = 'hidden' self.addLayer(name, arg[1]) hiddens.append(name) elif len(arg) == 2: pass else: raise AttributeError("not enough layers! need >= 2") self.addLayer('output', arg[-1]) lastName = "input" for name in hiddens: if netType == "parallel": self.connect('input', name) self.connect(name, 'output') else: # serial self.connect(lastName, name) lastName = name if netType == "serial" or lastName == "input": self.connect(lastName, "output")
python
def addLayers(self, *arg, **kw): """ Creates an N layer network with 'input', 'hidden1', 'hidden2',... and 'output' layers. Keyword type indicates "parallel" or "serial". If only one hidden layer, it is called "hidden". """ netType = "serial" if "type" in kw: netType = kw["type"] self.addLayer('input', arg[0]) hiddens = [] if len(arg) > 3: hcount = 0 for hidc in arg[1:-1]: name = 'hidden%d' % hcount self.addLayer(name, hidc) hiddens.append(name) hcount += 1 elif len(arg) == 3: name = 'hidden' self.addLayer(name, arg[1]) hiddens.append(name) elif len(arg) == 2: pass else: raise AttributeError("not enough layers! need >= 2") self.addLayer('output', arg[-1]) lastName = "input" for name in hiddens: if netType == "parallel": self.connect('input', name) self.connect(name, 'output') else: # serial self.connect(lastName, name) lastName = name if netType == "serial" or lastName == "input": self.connect(lastName, "output")
Creates an N layer network with 'input', 'hidden1', 'hidden2',... and 'output' layers. Keyword type indicates "parallel" or "serial". If only one hidden layer, it is called "hidden".
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L998-L1034
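The addLayers() docstring above distinguishes serial and parallel wiring of the hidden layers; a short sketch of both, under the same assumed import path.

from calysto.ai.conx import Network  # assumed import path

# Serial (default): input -> hidden layers in a chain -> output
net = Network()
net.addLayers(2, 4, 3, 1)

# Parallel: each hidden layer is wired input -> hidden -> output
net2 = Network()
net2.addLayers(2, 4, 3, 1, type="parallel")

# With exactly one hidden layer it is simply named "hidden"
net3 = Network()
net3.addLayers(2, 5, 1)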
Calysto/calysto
calysto/ai/conx.py
Network.deleteLayerNode
def deleteLayerNode(self, layername, nodeNum): """ Removes a particular unit/node from a layer. """ # first, construct an array of all of the weights # that won't be deleted: gene = [] for layer in self.layers: if layer.type != 'Input': for i in range(layer.size): if layer.name == layername and i == nodeNum: pass # skip it else: gene.append(layer.weight[i]) for connection in self.connections: for i in range(connection.fromLayer.size): for j in range(connection.toLayer.size): if ((connection.fromLayer.name == layername and i == nodeNum) or (connection.toLayer.name == layername and j == nodeNum)): pass # skip weights from/to nodeNum else: gene.append(connection.weight[i][j]) # now, change the size (removes rightmost node): self.changeLayerSize(layername, self[layername].size - 1) # and put the good weights where they go: self.unArrayify(gene)
python
def deleteLayerNode(self, layername, nodeNum): """ Removes a particular unit/node from a layer. """ # first, construct an array of all of the weights # that won't be deleted: gene = [] for layer in self.layers: if layer.type != 'Input': for i in range(layer.size): if layer.name == layername and i == nodeNum: pass # skip it else: gene.append(layer.weight[i]) for connection in self.connections: for i in range(connection.fromLayer.size): for j in range(connection.toLayer.size): if ((connection.fromLayer.name == layername and i == nodeNum) or (connection.toLayer.name == layername and j == nodeNum)): pass # skip weights from/to nodeNum else: gene.append(connection.weight[i][j]) # now, change the size (removes rightmost node): self.changeLayerSize(layername, self[layername].size - 1) # and put the good weights where they go: self.unArrayify(gene)
Removes a particular unit/node from a layer.
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L1038-L1063
Calysto/calysto
calysto/ai/conx.py
Network.addLayerNode
def addLayerNode(self, layerName, bias = None, weights = {}): """ Adds a new node to a layer, and puts in new weights. Adds node on the end. Weights will be random, unless specified. bias = the new node's bias weight weights = dict of {connectedLayerName: [weights], ...} Example: >>> net = Network() # doctest: +ELLIPSIS Conx using seed: ... >>> net.addLayers(2, 5, 1) >>> net.addLayerNode("hidden", bias = -0.12, weights = {"input": [1, 0], "output": [0]}) """ self.changeLayerSize(layerName, self[layerName].size + 1) if bias != None: self[layerName].weight[-1] = bias for name in list(weights.keys()): for c in self.connections: if c.fromLayer.name == name and c.toLayer.name == layerName: for i in range(self[name].size): self[name, layerName].weight[i][-1] = weights[name][i] elif c.toLayer.name == name and c.fromLayer.name == layerName: for j in range(self[name].size): self[layerName, name].weight[-1][j] = weights[name][j]
python
def addLayerNode(self, layerName, bias = None, weights = {}): """ Adds a new node to a layer, and puts in new weights. Adds node on the end. Weights will be random, unless specified. bias = the new node's bias weight weights = dict of {connectedLayerName: [weights], ...} Example: >>> net = Network() # doctest: +ELLIPSIS Conx using seed: ... >>> net.addLayers(2, 5, 1) >>> net.addLayerNode("hidden", bias = -0.12, weights = {"input": [1, 0], "output": [0]}) """ self.changeLayerSize(layerName, self[layerName].size + 1) if bias != None: self[layerName].weight[-1] = bias for name in list(weights.keys()): for c in self.connections: if c.fromLayer.name == name and c.toLayer.name == layerName: for i in range(self[name].size): self[name, layerName].weight[i][-1] = weights[name][i] elif c.toLayer.name == name and c.fromLayer.name == layerName: for j in range(self[name].size): self[layerName, name].weight[-1][j] = weights[name][j]
Adds a new node to a layer, and puts in new weights. Adds node on the end. Weights will be random, unless specified. bias = the new node's bias weight weights = dict of {connectedLayerName: [weights], ...} Example: >>> net = Network() # doctest: +ELLIPSIS Conx using seed: ... >>> net.addLayers(2, 5, 1) >>> net.addLayerNode("hidden", bias = -0.12, weights = {"input": [1, 0], "output": [0]})
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L1064-L1088
Calysto/calysto
calysto/ai/conx.py
Network.changeLayerSize
def changeLayerSize(self, layername, newsize): """ Changes layer size. Newsize must be greater than zero. """ # for all connection from to this layer, change matrix: if self.sharedWeights: raise AttributeError("shared weights broken") for connection in self.connections: if connection.fromLayer.name == layername: connection.changeSize( newsize, connection.toLayer.size ) if connection.toLayer.name == layername: connection.changeSize( connection.fromLayer.size, newsize ) # then, change the actual layer size: self.getLayer(layername).changeSize(newsize)
python
def changeLayerSize(self, layername, newsize): """ Changes layer size. Newsize must be greater than zero. """ # for all connection from to this layer, change matrix: if self.sharedWeights: raise AttributeError("shared weights broken") for connection in self.connections: if connection.fromLayer.name == layername: connection.changeSize( newsize, connection.toLayer.size ) if connection.toLayer.name == layername: connection.changeSize( connection.fromLayer.size, newsize ) # then, change the actual layer size: self.getLayer(layername).changeSize(newsize)
Changes layer size. Newsize must be greater than zero.
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L1089-L1102
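changeLayerSize() resizes the named layer and every connection matrix attached to it; a brief sketch (the dictionary-style net["hidden"] lookup is taken from the addLayerNode() source above).

from calysto.ai.conx import Network  # assumed import path

net = Network()
net.addLayers(2, 5, 1)
net.changeLayerSize("hidden", 10)   # layer and its incoming/outgoing weight matrices are resized
print(net["hidden"].size)           # -> 10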
Calysto/calysto
calysto/ai/conx.py
Network.initialize
def initialize(self): """ Initializes network by calling Connection.initialize() and Layer.initialize(). self.count is set to zero. """ print("Initializing '%s' weights..." % self.name, end=" ", file=sys.stderr) if self.sharedWeights: raise AttributeError("shared weights broken") self.count = 0 for connection in self.connections: connection.initialize() for layer in self.layers: layer.initialize()
python
def initialize(self): """ Initializes network by calling Connection.initialize() and Layer.initialize(). self.count is set to zero. """ print("Initializing '%s' weights..." % self.name, end=" ", file=sys.stderr) if self.sharedWeights: raise AttributeError("shared weights broken") self.count = 0 for connection in self.connections: connection.initialize() for layer in self.layers: layer.initialize()
Initializes network by calling Connection.initialize() and Layer.initialize(). self.count is set to zero.
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L1110-L1122
Calysto/calysto
calysto/ai/conx.py
Network.putActivations
def putActivations(self, dict): """ Puts a dict of name: activations into their respective layers. """ for name in dict: self.layersByName[name].copyActivations( dict[name] )
python
def putActivations(self, dict): """ Puts a dict of name: activations into their respective layers. """ for name in dict: self.layersByName[name].copyActivations( dict[name] )
Puts a dict of name: activations into their respective layers.
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L1131-L1136
Calysto/calysto
calysto/ai/conx.py
Network.getActivationsDict
def getActivationsDict(self, nameList): """ Returns a dictionary of layer names that map to a list of activations. """ retval = {} for name in nameList: retval[name] = self.layersByName[name].getActivationsList() return retval
python
def getActivationsDict(self, nameList): """ Returns a dictionary of layer names that map to a list of activations. """ retval = {} for name in nameList: retval[name] = self.layersByName[name].getActivationsList() return retval
Returns a dictionary of layer names that map to a list of activations.
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L1137-L1144
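putActivations() and getActivationsDict() are symmetric, both keyed by layer name; a small round-trip sketch under the same assumed import.

from calysto.ai.conx import Network  # assumed import path

net = Network()
net.addLayers(2, 3, 1)
net.putActivations({"input": [0.0, 1.0]})   # copy activations into the named layer
acts = net.getActivationsDict(["input"])
print(acts["input"])                        # -> [0.0, 1.0]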
Calysto/calysto
calysto/ai/conx.py
Network.setSeed
def setSeed(self, value): """ Sets the seed to value. """ self.seed = value random.seed(self.seed) if self.verbosity >= 0: print("Conx using seed:", self.seed)
python
def setSeed(self, value): """ Sets the seed to value. """ self.seed = value random.seed(self.seed) if self.verbosity >= 0: print("Conx using seed:", self.seed)
Sets the seed to value.
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L1218-L1225
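setSeed() reseeds Python's random module, which makes weight initialization repeatable; a sketch.

from calysto.ai.conx import Network  # assumed import path

net = Network()
net.setSeed(42)        # prints "Conx using seed: 42" while verbosity >= 0
net.addLayers(2, 3, 1)
net.initialize()       # weights are re-randomized from the seeded generator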
Calysto/calysto
calysto/ai/conx.py
Network.getConnection
def getConnection(self, lfrom, lto): """ Returns the connection instance connecting the specified (string) layer names. """ for connection in self.connections: if connection.fromLayer.name == lfrom and \ connection.toLayer.name == lto: return connection raise NetworkError('Connection was not found.', (lfrom, lto))
python
def getConnection(self, lfrom, lto): """ Returns the connection instance connecting the specified (string) layer names. """ for connection in self.connections: if connection.fromLayer.name == lfrom and \ connection.toLayer.name == lto: return connection raise NetworkError('Connection was not found.', (lfrom, lto))
Returns the connection instance connecting the specified (string) layer names.
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L1226-L1235
Calysto/calysto
calysto/ai/conx.py
Network.setVerbosity
def setVerbosity(self, value): """ Sets network self._verbosity and each layer._verbosity to value. """ self._verbosity = value for layer in self.layers: layer._verbosity = value
python
def setVerbosity(self, value): """ Sets network self._verbosity and each layer._verbosity to value. """ self._verbosity = value for layer in self.layers: layer._verbosity = value
Sets network self._verbosity and each layer._verbosity to value.
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L1236-L1242
Calysto/calysto
calysto/ai/conx.py
Network.setMaxRandom
def setMaxRandom(self, value): """ Sets the maxRandom Layer attribute for each layer to value. Specifies the global range for randomly initialized values, [-max, max]. """ self._maxRandom = value for layer in self.layers: layer._maxRandom = value
python
def setMaxRandom(self, value): """ Sets the maxRandom Layer attribute for each layer to value. Specifies the global range for randomly initialized values, [-max, max]. """ self._maxRandom = value for layer in self.layers: layer._maxRandom = value
Sets the maxRandom Layer attribute for each layer to value. Specifies the global range for randomly initialized values, [-max, max].
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L1276-L1283
Calysto/calysto
calysto/ai/conx.py
Network.getWeights
def getWeights(self, fromName, toName): """ Gets the weights of the connection between two layers (argument strings). """ for connection in self.connections: if connection.fromLayer.name == fromName and \ connection.toLayer.name == toName: return connection.weight raise NetworkError('Connection was not found.', (fromName, toName))
python
def getWeights(self, fromName, toName): """ Gets the weights of the connection between two layers (argument strings). """ for connection in self.connections: if connection.fromLayer.name == fromName and \ connection.toLayer.name == toName: return connection.weight raise NetworkError('Connection was not found.', (fromName, toName))
Gets the weights of the connection between two layers (argument strings).
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L1284-L1292
Calysto/calysto
calysto/ai/conx.py
Network.setWeight
def setWeight(self, fromName, fromPos, toName, toPos, value): """ Sets the weight of the connection between two layers (argument strings). """ for connection in self.connections: if connection.fromLayer.name == fromName and \ connection.toLayer.name == toName: connection.weight[fromPos][toPos] = value return value raise NetworkError('Connection was not found.', (fromName, toName))
python
def setWeight(self, fromName, fromPos, toName, toPos, value): """ Sets the weight of the connection between two layers (argument strings). """ for connection in self.connections: if connection.fromLayer.name == fromName and \ connection.toLayer.name == toName: connection.weight[fromPos][toPos] = value return value raise NetworkError('Connection was not found.', (fromName, toName))
Sets the weight of the connection between two layers (argument strings).
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L1293-L1302
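getWeights() returns the weight matrix between two named layers and setWeight() writes one entry; both raise NetworkError if the connection does not exist. A sketch; note that both operate on the same underlying connection.weight object.

from calysto.ai.conx import Network  # assumed import path

net = Network()
net.addLayers(2, 3, 1)
w = net.getWeights("input", "hidden")          # matrix indexed [fromPos][toPos]
net.setWeight("input", 0, "hidden", 2, 0.5)    # weight from input unit 0 to hidden unit 2
print(w[0][2])                                 # -> 0.5, since w is the same matrix object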
Calysto/calysto
calysto/ai/conx.py
Network.setOrderedInputs
def setOrderedInputs(self, value): """ Sets self.orderedInputs to value. Specifies if inputs should be ordered and if so orders the inputs. """ self.orderedInputs = value if self.orderedInputs: self.loadOrder = [0] * len(self.inputs) for i in range(len(self.inputs)): self.loadOrder[i] = i
python
def setOrderedInputs(self, value): """ Sets self.orderedInputs to value. Specifies if inputs should be ordered and if so orders the inputs. """ self.orderedInputs = value if self.orderedInputs: self.loadOrder = [0] * len(self.inputs) for i in range(len(self.inputs)): self.loadOrder[i] = i
Sets self.orderedInputs to value. Specifies if inputs should be ordered and if so orders the inputs.
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L1303-L1312
Calysto/calysto
calysto/ai/conx.py
Network.verifyArguments
def verifyArguments(self, arg): """ Verifies that arguments to setInputs and setTargets are appropriately formatted. """ for l in arg: if not type(l) == list and \ not type(l) == type(Numeric.array([0.0])) and \ not type(l) == tuple and \ not type(l) == dict: return 0 if type(l) == dict: for i in l: if not type(i) == str and i not in list(self.layers.keys()): return 0 else: for i in l: if not type(i) == float and not type(i) == int: return 0 return 1
python
def verifyArguments(self, arg): """ Verifies that arguments to setInputs and setTargets are appropriately formatted. """ for l in arg: if not type(l) == list and \ not type(l) == type(Numeric.array([0.0])) and \ not type(l) == tuple and \ not type(l) == dict: return 0 if type(l) == dict: for i in l: if not type(i) == str and i not in list(self.layers.keys()): return 0 else: for i in l: if not type(i) == float and not type(i) == int: return 0 return 1
Verifies that arguments to setInputs and setTargets are appropriately formatted.
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L1313-L1331
Calysto/calysto
calysto/ai/conx.py
Network.setInputs
def setInputs(self, inputs): """ Sets self.input to inputs. Load order is by default random. Use setOrderedInputs() to order inputs. """ if not self.verifyArguments(inputs) and not self.patterned: raise NetworkError('setInputs() requires [[...],[...],...] or [{"layerName": [...]}, ...].', inputs) self.inputs = inputs self.loadOrder = [0] * len(self.inputs) for i in range(len(self.inputs)): self.loadOrder[i] = i
python
def setInputs(self, inputs): """ Sets self.input to inputs. Load order is by default random. Use setOrderedInputs() to order inputs. """ if not self.verifyArguments(inputs) and not self.patterned: raise NetworkError('setInputs() requires [[...],[...],...] or [{"layerName": [...]}, ...].', inputs) self.inputs = inputs self.loadOrder = [0] * len(self.inputs) for i in range(len(self.inputs)): self.loadOrder[i] = i
Sets self.input to inputs. Load order is by default random. Use setOrderedInputs() to order inputs.
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L1332-L1341
Calysto/calysto
calysto/ai/conx.py
Network.setTargets
def setTargets(self, targets): """ Sets the targets. """ if not self.verifyArguments(targets) and not self.patterned: raise NetworkError('setTargets() requires [[...],[...],...] or [{"layerName": [...]}, ...].', targets) self.targets = targets
python
def setTargets(self, targets): """ Sets the targets. """ if not self.verifyArguments(targets) and not self.patterned: raise NetworkError('setTargets() requires [[...],[...],...] or [{"layerName": [...]}, ...].', targets) self.targets = targets
Sets the targets.
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L1348-L1354
Calysto/calysto
calysto/ai/conx.py
Network.setInputsAndTargets
def setInputsAndTargets(self, data1, data2=None): """ Network.setInputsAndTargets() Sets the corpus of data for training. Can be in one of two formats: Format 1: setInputsAndTargets([[input0, target0], [input1, target1]...]) Network.setInputsAndTargets([[[i00, i01, ...], [t00, t01, ...]], [[i10, i11, ...], [t10, t11, ...]], ...]) Format 2: setInputsAndTargets([input0, input1, ...], [target0, target1, ...]) Network.setInputsAndTargets([[i00, i01, ...], [i10, i11, ...],...], [[t00, t01, ...], [t10, t11, ...],...] ) """ if data2 == None: # format #1 inputs = [x[0] for x in data1] targets = [x[1] for x in data1] else: # format 2 inputs = data1 targets = data2 self.setInputs(inputs) self.setTargets(targets)
python
def setInputsAndTargets(self, data1, data2=None): """ Network.setInputsAndTargets() Sets the corpus of data for training. Can be in one of two formats: Format 1: setInputsAndTargets([[input0, target0], [input1, target1]...]) Network.setInputsAndTargets([[[i00, i01, ...], [t00, t01, ...]], [[i10, i11, ...], [t10, t11, ...]], ...]) Format 2: setInputsAndTargets([input0, input1, ...], [target0, target1, ...]) Network.setInputsAndTargets([[i00, i01, ...], [i10, i11, ...],...], [[t00, t01, ...], [t10, t11, ...],...] ) """ if data2 == None: # format #1 inputs = [x[0] for x in data1] targets = [x[1] for x in data1] else: # format 2 inputs = data1 targets = data2 self.setInputs(inputs) self.setTargets(targets)
Network.setInputsAndTargets() Sets the corpus of data for training. Can be in one of two formats: Format 1: setInputsAndTargets([[input0, target0], [input1, target1]...]) Network.setInputsAndTargets([[[i00, i01, ...], [t00, t01, ...]], [[i10, i11, ...], [t10, t11, ...]], ...]) Format 2: setInputsAndTargets([input0, input1, ...], [target0, target1, ...]) Network.setInputsAndTargets([[i00, i01, ...], [i10, i11, ...],...], [[t00, t01, ...], [t10, t11, ...],...] )
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L1355-L1376
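The two calling conventions from the setInputsAndTargets() docstring, sketched on a small XOR-style corpus.

from calysto.ai.conx import Network  # assumed import path

net = Network()
net.addLayers(2, 3, 1)

# Format 1: a single list of [input, target] pairs
net.setInputsAndTargets([[[0, 0], [0]], [[0, 1], [1]], [[1, 0], [1]], [[1, 1], [0]]])

# Format 2: separate input and target lists
net.setInputsAndTargets([[0, 0], [0, 1], [1, 0], [1, 1]],
                        [[0], [1], [1], [0]])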
Calysto/calysto
calysto/ai/conx.py
Network.randomizeOrder
def randomizeOrder(self): """ Randomizes self.loadOrder, the order in which inputs set with self.setInputs() are presented. """ flag = [0] * len(self.inputs) self.loadOrder = [0] * len(self.inputs) for i in range(len(self.inputs)): pos = int(random.random() * len(self.inputs)) while (flag[pos] == 1): pos = int(random.random() * len(self.inputs)) flag[pos] = 1 self.loadOrder[pos] = i
python
def randomizeOrder(self): """ Randomizes self.loadOrder, the order in which inputs set with self.setInputs() are presented. """ flag = [0] * len(self.inputs) self.loadOrder = [0] * len(self.inputs) for i in range(len(self.inputs)): pos = int(random.random() * len(self.inputs)) while (flag[pos] == 1): pos = int(random.random() * len(self.inputs)) flag[pos] = 1 self.loadOrder[pos] = i
Randomizes self.loadOrder, the order in which inputs set with self.setInputs() are presented.
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L1400-L1412
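randomizeOrder() fills self.loadOrder with a random permutation of the pattern indices (by rejection-sampling free slots). For illustration only, the same effect can be obtained with random.shuffle; this is not the library's implementation.

import random

n = 4                          # number of input patterns
loadOrder = list(range(n))
random.shuffle(loadOrder)      # a random permutation of 0..n-1, like Network.randomizeOrder()
print(loadOrder)               # e.g. [2, 0, 3, 1]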
Calysto/calysto
calysto/ai/conx.py
Network.copyVector
def copyVector(self, vector1, vec2, start): """ Copies vec2 into vector1 being sure to replace patterns if necessary. Use self.copyActivations() or self.copyTargets() instead. """ vector2 = self.replacePatterns(vec2) length = min(len(vector1), len(vector2)) if self.verbosity > 4: print("Copying Vector: ", vector2[start:start+length]) p = 0 for i in range(start, start + length): vector1[p] = vector2[i] p += 1
python
def copyVector(self, vector1, vec2, start): """ Copies vec2 into vector1 being sure to replace patterns if necessary. Use self.copyActivations() or self.copyTargets() instead. """ vector2 = self.replacePatterns(vec2) length = min(len(vector1), len(vector2)) if self.verbosity > 4: print("Copying Vector: ", vector2[start:start+length]) p = 0 for i in range(start, start + length): vector1[p] = vector2[i] p += 1
Copies vec2 into vector1 being sure to replace patterns if necessary. Use self.copyActivations() or self.copyTargets() instead.
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L1414-L1427
Calysto/calysto
calysto/ai/conx.py
Network.copyActivations
def copyActivations(self, layer, vec, start = 0): """ Copies activations in vec to the specified layer, replacing patterns if necessary. """ vector = self.replacePatterns(vec, layer.name) if self.verbosity > 4: print("Copying Activations: ", vector[start:start+layer.size]) layer.copyActivations(vector[start:start+layer.size])
python
def copyActivations(self, layer, vec, start = 0): """ Copies activations in vec to the specified layer, replacing patterns if necessary. """ vector = self.replacePatterns(vec, layer.name) if self.verbosity > 4: print("Copying Activations: ", vector[start:start+layer.size]) layer.copyActivations(vector[start:start+layer.size])
Copies activations in vec to the specified layer, replacing patterns if necessary.
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L1428-L1436
Calysto/calysto
calysto/ai/conx.py
Network.getDataCrossValidation
def getDataCrossValidation(self, pos): """ Returns the inputs/targets for a pattern pos, or assumes that the layers are called input and output and uses the lists in self.inputs and self.targets. """ set = {} if type(self.inputs[pos]) == dict: set.update(self.inputs[pos]) else: set["input"] = self.inputs[pos] if self.targets: if type(self.targets[pos]) == dict: set.update(self.targets[pos]) else: set["output"] = self.targets[pos] return set
python
def getDataCrossValidation(self, pos): """ Returns the inputs/targets for a pattern pos, or assumes that the layers are called input and output and uses the lists in self.inputs and self.targets. """ set = {} if type(self.inputs[pos]) == dict: set.update(self.inputs[pos]) else: set["input"] = self.inputs[pos] if self.targets: if type(self.targets[pos]) == dict: set.update(self.targets[pos]) else: set["output"] = self.targets[pos] return set
Returns the inputs/targets for a pattern pos, or assumes that the layers are called input and output and uses the lists in self.inputs and self.targets.
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L1446-L1462
Calysto/calysto
calysto/ai/conx.py
Network.getDataMap
def getDataMap(self, intype, pos, name, offset = 0): """ Hook defined to lookup a name, and get it from a vector. Can be overloaded to get it from somewhere else. """ if intype == "input": vector = self.inputs elif intype == "target": vector = self.targets else: raise AttributeError("invalid map type '%s'" % intype) return vector[pos][offset:offset+self[name].size]
python
def getDataMap(self, intype, pos, name, offset = 0): """ Hook defined to lookup a name, and get it from a vector. Can be overloaded to get it from somewhere else. """ if intype == "input": vector = self.inputs elif intype == "target": vector = self.targets else: raise AttributeError("invalid map type '%s'" % intype) return vector[pos][offset:offset+self[name].size]
Hook defined to lookup a name, and get it from a vector. Can be overloaded to get it from somewhere else.
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L1463-L1474
Calysto/calysto
calysto/ai/conx.py
Network.getData
def getData(self, pos): """ Returns dictionary with input and target given pos. """ retval = {} if pos >= len(self.inputs): raise IndexError('getData() pattern beyond range.', pos) if self.verbosity >= 1: print("Getting input", pos, "...") if len(self.inputMap) == 0: if type(self.inputs[pos]) == dict: # allow inputs to be a dict retval.update(self.inputs[pos]) else: retval[self.layers[0].name] = self.inputs[pos] else: # mapInput set manually for vals in self.inputMap: (name, offset) = vals retval[name] = self.getDataMap("input", pos, name, offset) if self.verbosity > 1: print("Loading target", pos, "...") if len(self.targets) == 0: pass # ok, no targets elif len(self.targetMap) == 0: if type(self.targets[pos]) == dict: # allow targets to be a dict retval.update(self.targets[pos]) else: retval[self.layers[len(self.layers)-1].name] = self.targets[pos] else: # set manually for vals in self.targetMap: (name, offset) = vals retval[name] = self.getDataMap("target", pos, name, offset) return retval
python
def getData(self, pos): """ Returns dictionary with input and target given pos. """ retval = {} if pos >= len(self.inputs): raise IndexError('getData() pattern beyond range.', pos) if self.verbosity >= 1: print("Getting input", pos, "...") if len(self.inputMap) == 0: if type(self.inputs[pos]) == dict: # allow inputs to be a dict retval.update(self.inputs[pos]) else: retval[self.layers[0].name] = self.inputs[pos] else: # mapInput set manually for vals in self.inputMap: (name, offset) = vals retval[name] = self.getDataMap("input", pos, name, offset) if self.verbosity > 1: print("Loading target", pos, "...") if len(self.targets) == 0: pass # ok, no targets elif len(self.targetMap) == 0: if type(self.targets[pos]) == dict: # allow targets to be a dict retval.update(self.targets[pos]) else: retval[self.layers[len(self.layers)-1].name] = self.targets[pos] else: # set manually for vals in self.targetMap: (name, offset) = vals retval[name] = self.getDataMap("target", pos, name, offset) return retval
Returns dictionary with input and target given pos.
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L1475-L1504
Calysto/calysto
calysto/ai/conx.py
Network.verifyArchitecture
def verifyArchitecture(self): """ Check for orphaned layers or connections. Assure that network architecture is feed-forward (no-cycles). Check connectivity. Check naming. """ if len(self.cacheLayers) != 0 or len(self.cacheConnections) != 0: return # flags for layer type tests hiddenInput = 1 hiddenOutput = 1 outputLayerFlag = 1 inputLayerFlag = 1 # must have layers and connections if len(self.layers) == 0: raise NetworkError('No network layers.', \ self.layers) if len(self.connections) == 0: raise NetworkError('No network connections.', \ self.connections) # layers should not have the same name for x, y in [(x, y) for x in range(len(self.layers)) for y in range(len(self.layers))]: if x == y: pass # same layer so same name else: # different layers same name if self.layers[x].name == self.layers[y].name: raise NetworkError('Two layers have the same name.', (x,y)) # no multiple connections between layers for x, y in [(x,y) for x in range(len(self.connections)) for y in range(len(self.connections))]: if x == y: pass # same connection else: # multiple connections between fromLayer and toLayer if self.connections[x].fromLayer.name == self.connections[y].fromLayer.name and \ self.connections[x].toLayer.name == self.connections[y].toLayer.name: raise NetworkError('Multiple connections between two layers.', \ (self.connections[x].fromLayer.name, \ self.connections[x].toLayer.name)) # layer type tests for layer in self.layers: # no undefined layers if layer.type == 'Undefined': raise NetworkError('There is an unconnected layer.', layer.name) elif layer.type == 'Input': for connection in self.connections: # input layers must have outgoing connections if connection.fromLayer.name == layer.name: inputLayerFlag = 0 # input layers must have no incoming connections if connection.toLayer.name == layer.name: raise NetworkError('Layer has type \'Input\' and an incoming connection.', layer.name) if inputLayerFlag: raise NetworkError('Layer has type \'Input\' but no outgoing connections', layer.name) elif layer.type == 'Output': for connection in self.connections: # output layers must have no outgoing connections` if connection.fromLayer.name == layer.name: raise NetworkError('Layer has type \'Output\' and an outgoing connections.',layer.name) # output layers must have an incoming connection if connection.toLayer.name == layer.name: outputLayerFlag = 0 if outputLayerFlag: raise NetworkError('Layer has type \'Output\' and no incoming connections.', layer.name) elif layer.type == 'Hidden': for connection in self.connections: # hidden layers must have incoming and outgoing connections. if connection.toLayer.name == layer.name: hiddenInput = 0 if connection.fromLayer.name == layer.name: hiddenOutput = 0 if hiddenInput or hiddenOutput: raise NetworkError('Layer has type \'Hidden\' but does not have both input and output connections.',\ layer.name) else: raise LayerError('Unknown layer encountered in verifyArchitecture().', layer.name) # network should not have unconnected sub networks # every input layer should have a path to every output layer for inLayer in self.layers: if inLayer.type == 'Input': for outLayer in self.layers: if outLayer.type == 'Output': if not self.path(inLayer, outLayer): raise NetworkError('Network contains disconnected sub networks.', \ (inLayer.name, outLayer.name)) # network should not have directed cycles for layer in self.layers: if self.path(layer, layer): raise NetworkError('Network contains a cycle.', layer.name)
python
def verifyArchitecture(self): """ Check for orphaned layers or connections. Assure that network architecture is feed-forward (no-cycles). Check connectivity. Check naming. """ if len(self.cacheLayers) != 0 or len(self.cacheConnections) != 0: return # flags for layer type tests hiddenInput = 1 hiddenOutput = 1 outputLayerFlag = 1 inputLayerFlag = 1 # must have layers and connections if len(self.layers) == 0: raise NetworkError('No network layers.', \ self.layers) if len(self.connections) == 0: raise NetworkError('No network connections.', \ self.connections) # layers should not have the same name for x, y in [(x, y) for x in range(len(self.layers)) for y in range(len(self.layers))]: if x == y: pass # same layer so same name else: # different layers same name if self.layers[x].name == self.layers[y].name: raise NetworkError('Two layers have the same name.', (x,y)) # no multiple connections between layers for x, y in [(x,y) for x in range(len(self.connections)) for y in range(len(self.connections))]: if x == y: pass # same connection else: # multiple connections between fromLayer and toLayer if self.connections[x].fromLayer.name == self.connections[y].fromLayer.name and \ self.connections[x].toLayer.name == self.connections[y].toLayer.name: raise NetworkError('Multiple connections between two layers.', \ (self.connections[x].fromLayer.name, \ self.connections[x].toLayer.name)) # layer type tests for layer in self.layers: # no undefined layers if layer.type == 'Undefined': raise NetworkError('There is an unconnected layer.', layer.name) elif layer.type == 'Input': for connection in self.connections: # input layers must have outgoing connections if connection.fromLayer.name == layer.name: inputLayerFlag = 0 # input layers must have no incoming connections if connection.toLayer.name == layer.name: raise NetworkError('Layer has type \'Input\' and an incoming connection.', layer.name) if inputLayerFlag: raise NetworkError('Layer has type \'Input\' but no outgoing connections', layer.name) elif layer.type == 'Output': for connection in self.connections: # output layers must have no outgoing connections` if connection.fromLayer.name == layer.name: raise NetworkError('Layer has type \'Output\' and an outgoing connections.',layer.name) # output layers must have an incoming connection if connection.toLayer.name == layer.name: outputLayerFlag = 0 if outputLayerFlag: raise NetworkError('Layer has type \'Output\' and no incoming connections.', layer.name) elif layer.type == 'Hidden': for connection in self.connections: # hidden layers must have incoming and outgoing connections. if connection.toLayer.name == layer.name: hiddenInput = 0 if connection.fromLayer.name == layer.name: hiddenOutput = 0 if hiddenInput or hiddenOutput: raise NetworkError('Layer has type \'Hidden\' but does not have both input and output connections.',\ layer.name) else: raise LayerError('Unknown layer encountered in verifyArchitecture().', layer.name) # network should not have unconnected sub networks # every input layer should have a path to every output layer for inLayer in self.layers: if inLayer.type == 'Input': for outLayer in self.layers: if outLayer.type == 'Output': if not self.path(inLayer, outLayer): raise NetworkError('Network contains disconnected sub networks.', \ (inLayer.name, outLayer.name)) # network should not have directed cycles for layer in self.layers: if self.path(layer, layer): raise NetworkError('Network contains a cycle.', layer.name)
Check for orphaned layers or connections. Assure that network architecture is feed-forward (no-cycles). Check connectivity. Check naming.
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L1507-L1593
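verifyArchitecture() rejects duplicate names, orphaned layers, disconnected sub-networks and cycles; the connection machinery itself already refuses out-of-order links, as sketched below. NetworkError is assumed to be importable from the same module.

from calysto.ai.conx import Network, NetworkError  # assumed import path and exception location

net = Network()
net.addLayers(2, 3, 1)
net.verifyArchitecture()            # passes: a clean feed-forward topology
try:
    net.connect("output", "input")  # would close a cycle; connectAt() raises 'Layers out of order.'
except NetworkError as err:
    print("rejected:", err)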
Calysto/calysto
calysto/ai/conx.py
Network.verifyInputs
def verifyInputs(self): """ Used in propagate() to verify that the network input activations have been set. """ for layer in self.layers: if (layer.verify and layer.type == 'Input' and layer.kind != 'Context' and layer.active and not layer.activationSet): raise LayerError("Inputs are not set and verifyInputs() was called on layer '%s'." % layer.name) else: layer.resetActivationFlag()
python
def verifyInputs(self): """ Used in propagate() to verify that the network input activations have been set. """ for layer in self.layers: if (layer.verify and layer.type == 'Input' and layer.kind != 'Context' and layer.active and not layer.activationSet): raise LayerError("Inputs are not set and verifyInputs() was called on layer '%s'." % layer.name) else: layer.resetActivationFlag()
Used in propagate() to verify that the network input activations have been set.
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L1594-L1607
Calysto/calysto
calysto/ai/conx.py
Network.verifyTargets
def verifyTargets(self): """ Used in backprop() to verify that the network targets have been set. """ for layer in self.layers: if layer.verify and layer.type == 'Output' and layer.active and not layer.targetSet: raise LayerError('Targets are not set and verifyTargets() was called.',\ (layer.name, layer.type)) else: layer.resetTargetFlag()
python
def verifyTargets(self): """ Used in backprop() to verify that the network targets have been set. """ for layer in self.layers: if layer.verify and layer.type == 'Output' and layer.active and not layer.targetSet: raise LayerError('Targets are not set and verifyTargets() was called.',\ (layer.name, layer.type)) else: layer.resetTargetFlag()
Used in backprop() to verify that the network targets have been set.
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L1608-L1618
Calysto/calysto
calysto/ai/conx.py
Network.RMSError
def RMSError(self): """ Returns Root Mean Squared Error for all output layers in this network. """ tss = 0.0 size = 0 for layer in self.layers: if layer.type == 'Output': tss += layer.TSSError() size += layer.size return math.sqrt( tss / size )
python
def RMSError(self): """ Returns Root Mean Squared Error for all output layers in this network. """ tss = 0.0 size = 0 for layer in self.layers: if layer.type == 'Output': tss += layer.TSSError() size += layer.size return math.sqrt( tss / size )
Returns Root Mean Squared Error for all output layers in this network.
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L1627-L1637
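RMSError() is the square root of the summed output-layer TSS error divided by the total number of output units; a small numeric illustration of that formula.

import math

tss = 0.02                               # hypothetical summed squared error over a 4-unit output layer
size = 4
print(round(math.sqrt(tss / size), 4))   # -> 0.0707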
Calysto/calysto
calysto/ai/conx.py
Network.train
def train(self, sweeps=None, cont=0): """ Trains the network on the dataset till a stopping condition is met. This stopping condition can be a limiting epoch or a percentage correct requirement. """ # check architecture self.complete = 0 self.verifyArchitecture() tssErr = 0.0; rmsErr = 0.0; totalCorrect = 0; totalCount = 1; totalPCorrect = {} if not cont: # starting afresh self.resetFlags() self.epoch = 0 self.reportStart() self.resetCount = 1 self.epoch = 1 self.lastLowestTSSError = sys.maxsize # some maximum value (not all pythons have Infinity) if sweeps != None: self.resetEpoch = sweeps else: if sweeps != None: self.resetEpoch = self.epoch + sweeps - 1 while self.doWhile(totalCount, totalCorrect): (tssErr, totalCorrect, totalCount, totalPCorrect) = self.sweep() self.complete = 1 if totalCount != 0: rmsErr = math.sqrt(tssErr / totalCount) else: self.Print("Warning: sweep didn't do anything!") if self.epoch % self.reportRate == 0: self.reportEpoch(self.epoch, tssErr, totalCorrect, totalCount, rmsErr, totalPCorrect) if len(self.crossValidationCorpus) > 0 or self.autoCrossValidation: (tssCVErr, totalCVCorrect, totalCVCount, totalCVPCorrect) = self.sweepCrossValidation() rmsCVErr = math.sqrt(tssCVErr / totalCVCount) self.Print("CV #%6d | TSS Error: %.4f | Correct: %.4f | RMS Error: %.4f" % \ (self.epoch, tssCVErr, totalCVCorrect * 1.0 / totalCVCount, rmsCVErr)) if self.autoSaveWeightsFile != None and tssCVErr < self.lastLowestTSSError: self.lastLowestTSSError = tssCVErr self.saveWeights(self.autoSaveWeightsFile, mode = self.autoSaveWeightsFileFormat) self.Print("auto saving weights to '%s'..." % self.lastAutoSaveWeightsFilename) if self.autoSaveNetworkFile != None: self.saveNetwork(self.autoSaveNetworkFile, mode = self.autoSaveNetworkFileFormat) self.Print("auto saving network to '%s'..." % self.lastAutoSaveNetworkFilename) if totalCVCorrect * 1.0 / totalCVCount >= self.stopPercent and self.useCrossValidationToStop: self.epoch += 1 break if self.resetEpoch == self.epoch: if self.resetCount == self.resetLimit: self.Print("Reset limit reached; ending without reaching goal") self.epoch += 1 self.complete = 0 break self.resetCount += 1 self.Print("RESET! resetEpoch reached; starting over...") self.initialize() tssErr = 0.0; rmsErr = 0.0; self.epoch = 1; totalCorrect = 0; totalPCorrect = {} continue self.epoch += 1 if totalCount > 0: self.reportFinal(self.epoch, tssErr, totalCorrect, totalCount, rmsErr, totalPCorrect) if len(self.crossValidationCorpus) > 0 or self.autoCrossValidation: (tssCVErr, totalCVCorrect, totalCVCount, totalCVPCorrect) = self.sweepCrossValidation() rmsCVErr = math.sqrt(tssCVErr / totalCVCount) self.Print("CV #%6d | TSS Error: %.4f | Correct: %.4f | RMS Error: %.4f" % \ (self.epoch-1, tssCVErr, totalCVCorrect * 1.0 / totalCVCount, rmsCVErr)) if self.autoSaveWeightsFile != None and tssCVErr < self.lastLowestTSSError: self.lastLowestTSSError = tssCVErr self.saveWeights(self.autoSaveWeightsFile, mode = self.autoSaveWeightsFileFormat) self.Print("auto saving weights to '%s'..." % self.lastAutoSaveWeightsFilename) if self.autoSaveNetworkFile != None: self.saveNetwork(self.autoSaveNetworkFile, mode = self.autoSaveNetworkFileFormat) self.Print("auto saving network to '%s'..." % self.lastAutoSaveNetworkFilename) else: print("Final: nothing done")
python
def train(self, sweeps=None, cont=0): """ Trains the network on the dataset till a stopping condition is met. This stopping condition can be a limiting epoch or a percentage correct requirement. """ # check architecture self.complete = 0 self.verifyArchitecture() tssErr = 0.0; rmsErr = 0.0; totalCorrect = 0; totalCount = 1; totalPCorrect = {} if not cont: # starting afresh self.resetFlags() self.epoch = 0 self.reportStart() self.resetCount = 1 self.epoch = 1 self.lastLowestTSSError = sys.maxsize # some maximum value (not all pythons have Infinity) if sweeps != None: self.resetEpoch = sweeps else: if sweeps != None: self.resetEpoch = self.epoch + sweeps - 1 while self.doWhile(totalCount, totalCorrect): (tssErr, totalCorrect, totalCount, totalPCorrect) = self.sweep() self.complete = 1 if totalCount != 0: rmsErr = math.sqrt(tssErr / totalCount) else: self.Print("Warning: sweep didn't do anything!") if self.epoch % self.reportRate == 0: self.reportEpoch(self.epoch, tssErr, totalCorrect, totalCount, rmsErr, totalPCorrect) if len(self.crossValidationCorpus) > 0 or self.autoCrossValidation: (tssCVErr, totalCVCorrect, totalCVCount, totalCVPCorrect) = self.sweepCrossValidation() rmsCVErr = math.sqrt(tssCVErr / totalCVCount) self.Print("CV #%6d | TSS Error: %.4f | Correct: %.4f | RMS Error: %.4f" % \ (self.epoch, tssCVErr, totalCVCorrect * 1.0 / totalCVCount, rmsCVErr)) if self.autoSaveWeightsFile != None and tssCVErr < self.lastLowestTSSError: self.lastLowestTSSError = tssCVErr self.saveWeights(self.autoSaveWeightsFile, mode = self.autoSaveWeightsFileFormat) self.Print("auto saving weights to '%s'..." % self.lastAutoSaveWeightsFilename) if self.autoSaveNetworkFile != None: self.saveNetwork(self.autoSaveNetworkFile, mode = self.autoSaveNetworkFileFormat) self.Print("auto saving network to '%s'..." % self.lastAutoSaveNetworkFilename) if totalCVCorrect * 1.0 / totalCVCount >= self.stopPercent and self.useCrossValidationToStop: self.epoch += 1 break if self.resetEpoch == self.epoch: if self.resetCount == self.resetLimit: self.Print("Reset limit reached; ending without reaching goal") self.epoch += 1 self.complete = 0 break self.resetCount += 1 self.Print("RESET! resetEpoch reached; starting over...") self.initialize() tssErr = 0.0; rmsErr = 0.0; self.epoch = 1; totalCorrect = 0; totalPCorrect = {} continue self.epoch += 1 if totalCount > 0: self.reportFinal(self.epoch, tssErr, totalCorrect, totalCount, rmsErr, totalPCorrect) if len(self.crossValidationCorpus) > 0 or self.autoCrossValidation: (tssCVErr, totalCVCorrect, totalCVCount, totalCVPCorrect) = self.sweepCrossValidation() rmsCVErr = math.sqrt(tssCVErr / totalCVCount) self.Print("CV #%6d | TSS Error: %.4f | Correct: %.4f | RMS Error: %.4f" % \ (self.epoch-1, tssCVErr, totalCVCorrect * 1.0 / totalCVCount, rmsCVErr)) if self.autoSaveWeightsFile != None and tssCVErr < self.lastLowestTSSError: self.lastLowestTSSError = tssCVErr self.saveWeights(self.autoSaveWeightsFile, mode = self.autoSaveWeightsFileFormat) self.Print("auto saving weights to '%s'..." % self.lastAutoSaveWeightsFilename) if self.autoSaveNetworkFile != None: self.saveNetwork(self.autoSaveNetworkFile, mode = self.autoSaveNetworkFileFormat) self.Print("auto saving network to '%s'..." % self.lastAutoSaveNetworkFilename) else: print("Final: nothing done")
Trains the network on the dataset till a stopping condition is met. This stopping condition can be a limiting epoch or a percentage correct requirement.
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L1683-L1755
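A hypothetical end-to-end training sketch for train(); the sweeps argument sets the reset epoch, and training stops when doWhile()'s condition or the reset limit is reached. Import path assumed as before.

from calysto.ai.conx import Network  # assumed import path

net = Network()
net.setSeed(42)
net.addLayers(2, 3, 1)
net.setInputsAndTargets([[0, 0], [0, 1], [1, 0], [1, 1]],
                        [[0], [1], [1], [0]])
net.train(sweeps=500)   # reset/stop point at epoch 500; reports progress every reportRate epochs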
Calysto/calysto
calysto/ai/conx.py
Network.step
def step(self, **args): """ Network.step() Does a single step. Calls propagate(), backprop(), and change_weights() if learning is set. Format for parameters: <layer name> = <activation/target list> """ if self.verbosity > 0: print("Network.step() called with:", args) # First, copy the values into either activations or targets: retargs = self.preStep(**args) if retargs: args = retargs # replace the args # Propagate activation through network: self.propagate(**args) retargs = self.postPropagate(**args) if retargs: args = retargs # replace the args # Next, take care of any Auto-association, and copy # activations to targets for aa in self.association: (inName, outName) = aa inLayer = self.getLayer(inName) if inLayer.type not in ['Input', "Hidden"]: raise LayerError('Associated input layer not type \'Input\' or \'Hidden\'.', \ inLayer.type) outLayer = self.getLayer(outName) if not outLayer.type == 'Output': raise LayerError('Associated output layer not type \'Output\'.', \ outLayer.type) outLayer.copyTargets(inLayer.activation) # Compute error, and back prop it: retargs = self.preBackprop(**args) if retargs: args = retargs # replace the args (error, correct, total, pcorrect) = self.backprop(**args) # compute_error() if self.verbosity > 2 or self.interactive: self.display() if self.interactive: self.prompt() retargs = self.postBackprop(**args) if retargs: args = retargs # replace the args # if learning is true, and need to update weights here: if self.learning and not self.batch: self.change_weights() # else change weights in sweep retargs = self.postStep(**args) if retargs: args = retargs # replace the args self.reportPattern() return (error, correct, total, pcorrect)
python
def step(self, **args): """ Network.step() Does a single step. Calls propagate(), backprop(), and change_weights() if learning is set. Format for parameters: <layer name> = <activation/target list> """ if self.verbosity > 0: print("Network.step() called with:", args) # First, copy the values into either activations or targets: retargs = self.preStep(**args) if retargs: args = retargs # replace the args # Propagate activation through network: self.propagate(**args) retargs = self.postPropagate(**args) if retargs: args = retargs # replace the args # Next, take care of any Auto-association, and copy # activations to targets for aa in self.association: (inName, outName) = aa inLayer = self.getLayer(inName) if inLayer.type not in ['Input', "Hidden"]: raise LayerError('Associated input layer not type \'Input\' or \'Hidden\'.', \ inLayer.type) outLayer = self.getLayer(outName) if not outLayer.type == 'Output': raise LayerError('Associated output layer not type \'Output\'.', \ outLayer.type) outLayer.copyTargets(inLayer.activation) # Compute error, and back prop it: retargs = self.preBackprop(**args) if retargs: args = retargs # replace the args (error, correct, total, pcorrect) = self.backprop(**args) # compute_error() if self.verbosity > 2 or self.interactive: self.display() if self.interactive: self.prompt() retargs = self.postBackprop(**args) if retargs: args = retargs # replace the args # if learning is true, and need to update weights here: if self.learning and not self.batch: self.change_weights() # else change weights in sweep retargs = self.postStep(**args) if retargs: args = retargs # replace the args self.reportPattern() return (error, correct, total, pcorrect)
Network.step() Does a single step. Calls propagate(), backprop(), and change_weights() if learning is set. Format for parameters: <layer name> = <activation/target list>
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L1766-L1812
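step() takes activations and targets as keyword arguments keyed by layer name, as its docstring notes; a single-pattern sketch.

from calysto.ai.conx import Network  # assumed import path

net = Network()
net.addLayers(2, 3, 1)
# one propagate/backprop cycle on one pattern; keywords are layer names
error, correct, total, pcorrect = net.step(input=[0.0, 1.0], output=[1.0])
print(error, correct, total)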
Calysto/calysto
calysto/ai/conx.py
Network.sweep
def sweep(self): """ Runs through entire dataset. Returns TSS error, total correct, total count, pcorrect (a dict of layer data) """ self.preSweep() if self.loadOrder == []: raise NetworkError('No loadOrder for the inputs. Make sure inputs are properly set.', self.loadOrder) if len(self.targets) != 0 and len(self.targets) != len(self.inputs): raise NetworkError("Number of inputs does not equal number of targets (inputs=%d, targets=%d)" % (len(self.targets), len(self.inputs))) if self.verbosity >= 1: print("Epoch #", self.epoch, "Cycle...") if not self.orderedInputs: self.randomizeOrder() tssError = 0.0; totalCorrect = 0; totalCount = 0; totalPCorrect = {} cnt = 0 if self.saveResults: self.results = [(0,0,0) for x in self.loadOrder] for i in self.loadOrder: if self.verbosity >= 1 or self.interactive: print("-----------------------------------Pattern #", self.loadOrder[i] + 1) datum = self.getData(i) # creates a dictionary of input/targets from self.inputs, self.targets if cnt < len(self.loadOrder) - 1: self.currentSweepCount = cnt else: self.currentSweepCount = None self._sweeping = 1 (error, correct, total, pcorrect) = self.step( **datum ) self._sweeping = 0 if self.saveResults: self.results[i] = (error, correct, total, pcorrect) tssError += error totalCorrect += correct totalCount += total sumMerge(totalPCorrect, pcorrect) if self.sweepReportRate and (cnt + 1) % self.sweepReportRate == 0: print(" Step # %6d | TSS Error: %.4f | Correct: %.4f" % \ (cnt + 1, tssError, totalCorrect * 1.0 / totalCount)) if self.crossValidationSampleRate and self.epoch % self.crossValidationSampleRate == 0: self.saveNetworkForCrossValidation(self.crossValidationSampleFile) cnt += 1 if self.learning and self.batch: self.change_weights() # batch mode, otherwise change weights in step self.postSweep() return (tssError, totalCorrect, totalCount, totalPCorrect)
python
def sweep(self): """ Runs through entire dataset. Returns TSS error, total correct, total count, pcorrect (a dict of layer data) """ self.preSweep() if self.loadOrder == []: raise NetworkError('No loadOrder for the inputs. Make sure inputs are properly set.', self.loadOrder) if len(self.targets) != 0 and len(self.targets) != len(self.inputs): raise NetworkError("Number of inputs does not equal number of targets (inputs=%d, targets=%d)" % (len(self.targets), len(self.inputs))) if self.verbosity >= 1: print("Epoch #", self.epoch, "Cycle...") if not self.orderedInputs: self.randomizeOrder() tssError = 0.0; totalCorrect = 0; totalCount = 0; totalPCorrect = {} cnt = 0 if self.saveResults: self.results = [(0,0,0) for x in self.loadOrder] for i in self.loadOrder: if self.verbosity >= 1 or self.interactive: print("-----------------------------------Pattern #", self.loadOrder[i] + 1) datum = self.getData(i) # creates a dictionary of input/targets from self.inputs, self.targets if cnt < len(self.loadOrder) - 1: self.currentSweepCount = cnt else: self.currentSweepCount = None self._sweeping = 1 (error, correct, total, pcorrect) = self.step( **datum ) self._sweeping = 0 if self.saveResults: self.results[i] = (error, correct, total, pcorrect) tssError += error totalCorrect += correct totalCount += total sumMerge(totalPCorrect, pcorrect) if self.sweepReportRate and (cnt + 1) % self.sweepReportRate == 0: print(" Step # %6d | TSS Error: %.4f | Correct: %.4f" % \ (cnt + 1, tssError, totalCorrect * 1.0 / totalCount)) if self.crossValidationSampleRate and self.epoch % self.crossValidationSampleRate == 0: self.saveNetworkForCrossValidation(self.crossValidationSampleFile) cnt += 1 if self.learning and self.batch: self.change_weights() # batch mode, otherwise change weights in step self.postSweep() return (tssError, totalCorrect, totalCount, totalPCorrect)
Runs through entire dataset. Returns TSS error, total correct, total count, pcorrect (a dict of layer data)
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L1822-L1865
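A rough epoch loop around sweep(), assuming setInputs()/setTargets() helpers exist to fill self.inputs, self.targets, and the load order (they are not shown in this excerpt):

net = Network()
net.addLayers(2, 2, 1)
net.setInputs([[0, 0], [0, 1], [1, 0], [1, 1]])   # assumed helper: fills self.inputs
net.setTargets([[0], [1], [1], [0]])              # assumed helper: fills self.targets
for epoch in range(1000):
    tss, correct, total, pcorrect = net.sweep()
    net.epoch += 1            # sweep() does not advance self.epoch itself
    if correct == total:
        break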
Calysto/calysto
calysto/ai/conx.py
Network.sweepCrossValidation
def sweepCrossValidation(self): """ sweepCrossValidation() will go through each of the crossvalidation input/targets. The crossValidationCorpus is a list of dictionaries of input/targets referenced by layername. Example: ({"input": [0.0, 0.1], "output": [1.0]}, {"input": [0.5, 0.9], "output": [0.0]}) """ # get learning value and then turn it off oldLearning = self.learning self.learning = 0 tssError = 0.0; totalCorrect = 0; totalCount = 0; totalPCorrect = {} self._cv = True # in cross validation if self.autoCrossValidation: for i in range(len(self.inputs)): set = self.getDataCrossValidation(i) self._sweeping = 1 (error, correct, total, pcorrect) = self.step( **set ) self._sweeping = 0 if self.crossValidationReportLayers != []: (error, correct, total, pcorrect) = self.getError( *self.crossValidationReportLayers ) tssError += error totalCorrect += correct totalCount += total sumMerge(totalPCorrect, pcorrect) else: for set in self.crossValidationCorpus: self._sweeping = 1 (error, correct, total, pcorrect) = self.step( **set ) self._sweeping = 0 if self.crossValidationReportLayers != []: (error, correct, total, pcorrect) = self.getError( *self.crossValidationReportLayers ) tssError += error totalCorrect += correct totalCount += total sumMerge(totalPCorrect, pcorrect) self.learning = oldLearning self._cv = False return (tssError, totalCorrect, totalCount, totalPCorrect)
python
def sweepCrossValidation(self): """ sweepCrossValidation() will go through each of the crossvalidation input/targets. The crossValidationCorpus is a list of dictionaries of input/targets referenced by layername. Example: ({"input": [0.0, 0.1], "output": [1.0]}, {"input": [0.5, 0.9], "output": [0.0]}) """ # get learning value and then turn it off oldLearning = self.learning self.learning = 0 tssError = 0.0; totalCorrect = 0; totalCount = 0; totalPCorrect = {} self._cv = True # in cross validation if self.autoCrossValidation: for i in range(len(self.inputs)): set = self.getDataCrossValidation(i) self._sweeping = 1 (error, correct, total, pcorrect) = self.step( **set ) self._sweeping = 0 if self.crossValidationReportLayers != []: (error, correct, total, pcorrect) = self.getError( *self.crossValidationReportLayers ) tssError += error totalCorrect += correct totalCount += total sumMerge(totalPCorrect, pcorrect) else: for set in self.crossValidationCorpus: self._sweeping = 1 (error, correct, total, pcorrect) = self.step( **set ) self._sweeping = 0 if self.crossValidationReportLayers != []: (error, correct, total, pcorrect) = self.getError( *self.crossValidationReportLayers ) tssError += error totalCorrect += correct totalCount += total sumMerge(totalPCorrect, pcorrect) self.learning = oldLearning self._cv = False return (tssError, totalCorrect, totalCount, totalPCorrect)
sweepCrossValidation() will go through each of the cross-validation input/targets. The crossValidationCorpus is a list of dictionaries of input/targets referenced by layer name. Example: ({"input": [0.0, 0.1], "output": [1.0]}, {"input": [0.5, 0.9], "output": [0.0]})
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L1866-L1903
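Continuing the sketch above: attach a small held-out corpus (the docstring's own example) and measure error on it; learning is switched off internally for the duration of the sweep.

net.crossValidationCorpus = (
    {"input": [0.0, 0.1], "output": [1.0]},
    {"input": [0.5, 0.9], "output": [0.0]},
)
cvError, cvCorrect, cvTotal, cvPCorrect = net.sweepCrossValidation()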
Calysto/calysto
calysto/ai/conx.py
Network.numConnects
def numConnects(self, layerName):
        """
        Number of incoming weights, including bias. Assumes fully connected.
        """
        count = 0
        if self[layerName].active:
            count += 1 # 1 = bias
        for connection in self.connections:
            if connection.active and connection.fromLayer.active and connection.toLayer.name == layerName:
                count += connection.fromLayer.size
        return count
python
def numConnects(self, layerName):
        """
        Number of incoming weights, including bias. Assumes fully connected.
        """
        count = 0
        if self[layerName].active:
            count += 1 # 1 = bias
        for connection in self.connections:
            if connection.active and connection.fromLayer.active and connection.toLayer.name == layerName:
                count += connection.fromLayer.size
        return count
Number of incoming weights, including bias. Assumes fully connected.
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L1933-L1941
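A quick check of the fan-in bookkeeping for a fully connected 2-5-1 network: the hidden layer receives 2 incoming weights plus 1 bias, the output layer 5 plus 1.

net = Network()
net.addLayers(2, 5, 1)
assert net.numConnects("hidden") == 3    # 2 inputs + bias
assert net.numConnects("output") == 6    # 5 hiddens + bias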
Calysto/calysto
calysto/ai/conx.py
Network.prop_from
def prop_from(self, startLayers): """ Start propagation from the layers in the list startLayers. Make sure startLayers are initialized with the desired activations. NO ERROR CHECKING. """ if self.verbosity > 2: print("Partially propagating network:") # find all the layers involved in the propagation propagateLayers = [] # propagateLayers should not include startLayers (no loops) for startLayer in startLayers: for layer in self.layers: if self.path(startLayer, layer): propagateLayers.append(layer) for layer in propagateLayers: if layer.active: layer.netinput = (layer.weight).copy() for layer in propagateLayers: if layer.active: for connection in self.connections: if connection.active and connection.toLayer.name == layer.name: connection.toLayer.netinput = connection.toLayer.netinput + \ Numeric.matrixmultiply(connection.fromLayer.activation,\ connection.weight) # propagate! if layer.type != 'Input': layer.activation = self.activationFunction(layer.netinput) for layer in propagateLayers: if layer.log and layer.active: layer.writeLog(self)
python
def prop_from(self, startLayers): """ Start propagation from the layers in the list startLayers. Make sure startLayers are initialized with the desired activations. NO ERROR CHECKING. """ if self.verbosity > 2: print("Partially propagating network:") # find all the layers involved in the propagation propagateLayers = [] # propagateLayers should not include startLayers (no loops) for startLayer in startLayers: for layer in self.layers: if self.path(startLayer, layer): propagateLayers.append(layer) for layer in propagateLayers: if layer.active: layer.netinput = (layer.weight).copy() for layer in propagateLayers: if layer.active: for connection in self.connections: if connection.active and connection.toLayer.name == layer.name: connection.toLayer.netinput = connection.toLayer.netinput + \ Numeric.matrixmultiply(connection.fromLayer.activation,\ connection.weight) # propagate! if layer.type != 'Input': layer.activation = self.activationFunction(layer.netinput) for layer in propagateLayers: if layer.log and layer.active: layer.writeLog(self)
Start propagation from the layers in the list startLayers. Make sure startLayers are initialized with the desired activations. NO ERROR CHECKING.
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L1944-L1972
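A hedged sketch of partial propagation: clamp the hidden layer and propagate only the layers downstream of it. It assumes net["hidden"] indexing returns the Layer object and that layer.copyActivations() sets its activation vector, as both appear elsewhere in this file.

hidden = net["hidden"]
hidden.copyActivations([0.2, 0.9, 0.1, 0.5, 0.7])   # clamp the desired activations
net.prop_from([hidden])                             # expects Layer objects, no error checking
print(net["output"].activation)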
Calysto/calysto
calysto/ai/conx.py
Network.propagate
def propagate(self, **args): """ Propagates activation through the network. Optionally, takes input layer names as keywords, and their associated activations. If input layer(s) are given, then propagate() will return the output layer's activation. If there is more than one output layer, then a dictionary is returned. Examples: >>> net = Network() # doctest: +ELLIPSIS Conx using seed: ... >>> net.addLayers(2, 5, 1) >>> len(net.propagate(input = [0, .5])) 1 """ self.prePropagate(**args) for key in args: layer = self.getLayer(key) if layer.kind == 'Input': if self[key].verify and not self[key].activationSet == 0: raise AttributeError("attempt to set activations on input layer '%s' without reset" % key) self.copyActivations(layer, args[key]) elif layer.kind == 'Context': self.copyActivations(layer, args[key]) elif layer.kind == 'Output' and len(args[key]) == layer.size: # in case you expect propagate to handle the outputs self.copyTargets(layer, args[key]) self.verifyInputs() # better have inputs set if self.verbosity > 2: print("Propagate Network '" + self.name + "':") # initialize netinput: for layer in self.layers: if layer.type != 'Input' and layer.active: layer.netinput = (layer.weight).copy() # for each connection, in order: for layer in self.layers: if layer.active: for connection in self.connections: if (connection.toLayer.name == layer.name and connection.fromLayer.active and connection.active): a = connection.fromLayer.activation w = connection.weight m = Numeric.matrixmultiply(a, w) ni = m + connection.toLayer.netinput connection.toLayer.netinput = ni #connection.toLayer.netinput = \ # (connection.toLayer.netinput + # Numeric.matrixmultiply(connection.fromLayer.activation, # connection.weight)) # propagate! if layer.type != 'Input': layer.activation = self.activationFunction(layer.netinput) for layer in self.layers: if layer.log and layer.active: layer.writeLog(self) self.count += 1 # counts number of times propagate() is called if len(args) != 0: dict = {} for layer in self.layers: if layer.type == "Output": dict[layer.name] = layer.activation.copy() if len(dict) == 1: return dict[list(dict.keys())[0]] else: return dict
python
def propagate(self, **args): """ Propagates activation through the network. Optionally, takes input layer names as keywords, and their associated activations. If input layer(s) are given, then propagate() will return the output layer's activation. If there is more than one output layer, then a dictionary is returned. Examples: >>> net = Network() # doctest: +ELLIPSIS Conx using seed: ... >>> net.addLayers(2, 5, 1) >>> len(net.propagate(input = [0, .5])) 1 """ self.prePropagate(**args) for key in args: layer = self.getLayer(key) if layer.kind == 'Input': if self[key].verify and not self[key].activationSet == 0: raise AttributeError("attempt to set activations on input layer '%s' without reset" % key) self.copyActivations(layer, args[key]) elif layer.kind == 'Context': self.copyActivations(layer, args[key]) elif layer.kind == 'Output' and len(args[key]) == layer.size: # in case you expect propagate to handle the outputs self.copyTargets(layer, args[key]) self.verifyInputs() # better have inputs set if self.verbosity > 2: print("Propagate Network '" + self.name + "':") # initialize netinput: for layer in self.layers: if layer.type != 'Input' and layer.active: layer.netinput = (layer.weight).copy() # for each connection, in order: for layer in self.layers: if layer.active: for connection in self.connections: if (connection.toLayer.name == layer.name and connection.fromLayer.active and connection.active): a = connection.fromLayer.activation w = connection.weight m = Numeric.matrixmultiply(a, w) ni = m + connection.toLayer.netinput connection.toLayer.netinput = ni #connection.toLayer.netinput = \ # (connection.toLayer.netinput + # Numeric.matrixmultiply(connection.fromLayer.activation, # connection.weight)) # propagate! if layer.type != 'Input': layer.activation = self.activationFunction(layer.netinput) for layer in self.layers: if layer.log and layer.active: layer.writeLog(self) self.count += 1 # counts number of times propagate() is called if len(args) != 0: dict = {} for layer in self.layers: if layer.type == "Output": dict[layer.name] = layer.activation.copy() if len(dict) == 1: return dict[list(dict.keys())[0]] else: return dict
Propagates activation through the network. Optionally, takes input layer names as keywords, and their associated activations. If input layer(s) are given, then propagate() will return the output layer's activation. If there is more than one output layer, then a dictionary is returned. Examples: >>> net = Network() # doctest: +ELLIPSIS Conx using seed: ... >>> net.addLayers(2, 5, 1) >>> len(net.propagate(input = [0, .5])) 1
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L1973-L2039
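A sketch of the multiple-output case: with two output layers, propagate() returns a dict keyed by layer name instead of a single activation vector. It assumes connect() marks outA/outB as output layers, as the nbench loader later in this file relies on.

net = Network()
net.addLayer("input", 2)
net.addLayer("hidden", 3)
net.addLayer("outA", 1)
net.addLayer("outB", 2)
net.connect("input", "hidden")
net.connect("hidden", "outA")
net.connect("hidden", "outB")
result = net.propagate(input=[0.0, 1.0])
print(result["outA"], result["outB"])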
Calysto/calysto
calysto/ai/conx.py
Network.propagateTo
def propagateTo(self, toLayer, **args): """ Propagates activation to a layer. Optionally, takes input layer names as keywords, and their associated activations. Returns the toLayer's activation. Examples: >>> net = Network() # doctest: +ELLIPSIS Conx using seed: ... >>> net.addLayers(2, 5, 1) >>> len(net.propagateTo("output")) 1 >>> len(net.propagateTo("hidden")) 5 >>> len(net.propagateTo("hidden", input = [0, 0])) 5 """ for layerName in args: self[layerName].activationSet = 0 # force it to be ok self[layerName].copyActivations(args[layerName]) # init toLayer: self[toLayer].netinput = (self[toLayer].weight).copy() # for each connection, in order: for connection in self.connections: if connection.active and connection.toLayer.name == toLayer and connection.fromLayer.active: connection.toLayer.netinput = connection.toLayer.netinput + \ Numeric.matrixmultiply(connection.fromLayer.activation,\ connection.weight) # propagate! if self[toLayer].type != 'Input': self[toLayer].activation = self.activationFunction(self[toLayer].netinput) return self[toLayer].activation.copy()
python
def propagateTo(self, toLayer, **args): """ Propagates activation to a layer. Optionally, takes input layer names as keywords, and their associated activations. Returns the toLayer's activation. Examples: >>> net = Network() # doctest: +ELLIPSIS Conx using seed: ... >>> net.addLayers(2, 5, 1) >>> len(net.propagateTo("output")) 1 >>> len(net.propagateTo("hidden")) 5 >>> len(net.propagateTo("hidden", input = [0, 0])) 5 """ for layerName in args: self[layerName].activationSet = 0 # force it to be ok self[layerName].copyActivations(args[layerName]) # init toLayer: self[toLayer].netinput = (self[toLayer].weight).copy() # for each connection, in order: for connection in self.connections: if connection.active and connection.toLayer.name == toLayer and connection.fromLayer.active: connection.toLayer.netinput = connection.toLayer.netinput + \ Numeric.matrixmultiply(connection.fromLayer.activation,\ connection.weight) # propagate! if self[toLayer].type != 'Input': self[toLayer].activation = self.activationFunction(self[toLayer].netinput) return self[toLayer].activation.copy()
Propagates activation to a layer. Optionally, takes input layer names as keywords, and their associated activations. Returns the toLayer's activation. Examples: >>> net = Network() # doctest: +ELLIPSIS Conx using seed: ... >>> net.addLayers(2, 5, 1) >>> len(net.propagateTo("output")) 1 >>> len(net.propagateTo("hidden")) 5 >>> len(net.propagateTo("hidden", input = [0, 0])) 5
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L2040-L2070
Calysto/calysto
calysto/ai/conx.py
Network.propagateFrom
def propagateFrom(self, startLayer, **args): """ Propagates activation through the network. Optionally, takes input layer names as keywords, and their associated activations. If input layer(s) are given, then propagate() will return the output layer's activation. If there is more than one output layer, then a dictionary is returned. Examples: >>> net = Network() # doctest: +ELLIPSIS Conx using seed: ... >>> net.addLayers(2, 5, 1) >>> len(net.propagate(input = [1, .5])) 1 """ for layerName in args: self[layerName].copyActivations(args[layerName]) # initialize netinput: started = 0 for layer in self.layers: if layer.name == startLayer: started = 1 continue # don't set this one if not started: continue if layer.type != 'Input' and layer.active: layer.netinput = (layer.weight).copy() # for each connection, in order: started = 0 for layer in self.layers: if layer.name == startLayer: started = 1 continue # don't get inputs into this one if not started: continue if layer.active: for connection in self.connections: if connection.active and connection.toLayer.name == layer.name and connection.fromLayer.active: connection.toLayer.netinput = connection.toLayer.netinput + \ Numeric.matrixmultiply(connection.fromLayer.activation,\ connection.weight) # propagate! if layer.type != 'Input': layer.activation = self.activationFunction(layer.netinput) for layer in self.layers: if layer.log and layer.active: layer.writeLog(self) self.count += 1 # counts number of times propagate() is called if len(args) != 0: dict = {} for layer in self.layers: if layer.type == "Output": dict[layer.name] = layer.activation.copy() if len(dict) == 1: return dict[list(dict.keys())[0]] else: return dict
python
def propagateFrom(self, startLayer, **args): """ Propagates activation through the network. Optionally, takes input layer names as keywords, and their associated activations. If input layer(s) are given, then propagate() will return the output layer's activation. If there is more than one output layer, then a dictionary is returned. Examples: >>> net = Network() # doctest: +ELLIPSIS Conx using seed: ... >>> net.addLayers(2, 5, 1) >>> len(net.propagate(input = [1, .5])) 1 """ for layerName in args: self[layerName].copyActivations(args[layerName]) # initialize netinput: started = 0 for layer in self.layers: if layer.name == startLayer: started = 1 continue # don't set this one if not started: continue if layer.type != 'Input' and layer.active: layer.netinput = (layer.weight).copy() # for each connection, in order: started = 0 for layer in self.layers: if layer.name == startLayer: started = 1 continue # don't get inputs into this one if not started: continue if layer.active: for connection in self.connections: if connection.active and connection.toLayer.name == layer.name and connection.fromLayer.active: connection.toLayer.netinput = connection.toLayer.netinput + \ Numeric.matrixmultiply(connection.fromLayer.activation,\ connection.weight) # propagate! if layer.type != 'Input': layer.activation = self.activationFunction(layer.netinput) for layer in self.layers: if layer.log and layer.active: layer.writeLog(self) self.count += 1 # counts number of times propagate() is called if len(args) != 0: dict = {} for layer in self.layers: if layer.type == "Output": dict[layer.name] = layer.activation.copy() if len(dict) == 1: return dict[list(dict.keys())[0]] else: return dict
Propagates activation through the network. Optionally, takes input layer names as keywords, and their associated activations. If input layer(s) are given, then propagate() will return the output layer's activation. If there is more than one output layer, then a dictionary is returned. Examples: >>> net = Network() # doctest: +ELLIPSIS Conx using seed: ... >>> net.addLayers(2, 5, 1) >>> len(net.propagate(input = [1, .5])) 1
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L2071-L2124
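A sketch: after a full forward pass, re-propagate from the hidden layer onward with perturbed hidden activations, leaving the input layer untouched.

net = Network()
net.addLayers(2, 5, 1)
net.propagate(input=[0.0, 1.0])                    # full forward pass first
out = net.propagateFrom("hidden", hidden=[0.5, 0.5, 0.5, 0.5, 0.5])
print(out)                                         # output layer activation only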
Calysto/calysto
calysto/ai/conx.py
Network.activationFunctionASIG
def activationFunctionASIG(self, x):
        """
        Determine the activation of a node based on that node's net input.
        """
        def act(v):
            if v < -15.0: return 0.0
            elif v > 15.0: return 1.0
            else: return 1.0 / (1.0 + Numeric.exp(-v))
        return Numeric.array(list(map(act, x)), 'f')
python
def activationFunctionASIG(self, x):
        """
        Determine the activation of a node based on that node's net input.
        """
        def act(v):
            if v < -15.0: return 0.0
            elif v > 15.0: return 1.0
            else: return 1.0 / (1.0 + Numeric.exp(-v))
        return Numeric.array(list(map(act, x)), 'f')
Determine the activation of a node based on that node's net input.
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L2128-L2136
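The same clipped logistic written as a standalone sketch with NumPy (the original uses the older Numeric module and returns exactly 0.0/1.0 outside +/-15):

import numpy as np

def asig(x):
    # logistic 1/(1 + e^-x); clip the net input to +/-15 to avoid overflow
    x = np.clip(np.asarray(x, dtype="float32"), -15.0, 15.0)
    return 1.0 / (1.0 + np.exp(-x))

print(asig([-20.0, 0.0, 20.0]))    # approximately [0., 0.5, 1.]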
Calysto/calysto
calysto/ai/conx.py
Network.actDerivASIG
def actDerivASIG(self, x):
        """
        Only works on scalars.
        """
        def act(v):
            if v < -15.0: return 0.0
            elif v > 15.0: return 1.0
            else: return 1.0 / (1.0 + Numeric.exp(-v))
        return (act(x) * (1.0 - act(x))) + self.sigmoid_prime_offset
python
def actDerivASIG(self, x):
        """
        Only works on scalars.
        """
        def act(v):
            if v < -15.0: return 0.0
            elif v > 15.0: return 1.0
            else: return 1.0 / (1.0 + Numeric.exp(-v))
        return (act(x) * (1.0 - act(x))) + self.sigmoid_prime_offset
Only works on scalars.
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L2143-L2151
Calysto/calysto
calysto/ai/conx.py
Network.useTanhActivationFunction
def useTanhActivationFunction(self):
        """
        Change the network to use the hyperbolic tangent activation function
        for all layers. Must be called after all layers have been added.
        """
        self.activationFunction = self.activationFunctionTANH
        self.ACTPRIME = self.ACTPRIMETANH
        self.actDeriv = self.actDerivTANH
        for layer in self:
            layer.minTarget, layer.minActivation = -1.7159, -1.7159
            layer.maxTarget, layer.maxActivation = 1.7159, 1.7159
python
def useTanhActivationFunction(self):
        """
        Change the network to use the hyperbolic tangent activation function
        for all layers. Must be called after all layers have been added.
        """
        self.activationFunction = self.activationFunctionTANH
        self.ACTPRIME = self.ACTPRIMETANH
        self.actDeriv = self.actDerivTANH
        for layer in self:
            layer.minTarget, layer.minActivation = -1.7159, -1.7159
            layer.maxTarget, layer.maxActivation = 1.7159, 1.7159
Change the network to use the hyperbolic tangent activation function for all layers. Must be called after all layers have been added.
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L2186-L2196
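A short sketch of switching an existing network to tanh units; targets and activations then range over roughly [-1.7159, 1.7159] instead of [0, 1].

net = Network()
net.addLayers(2, 5, 1)
net.useTanhActivationFunction()
print(net["output"].maxTarget)     # 1.7159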
Calysto/calysto
calysto/ai/conx.py
Network.useFahlmanActivationFunction
def useFahlmanActivationFunction(self):
        """
        Change the network to use Fahlman's default activation function for
        all layers. Must be called after all layers have been added.
        """
        self.activationFunction = self.activationFunctionFahlman
        self.ACTPRIME = self.ACTPRIME_Fahlman
        self.actDeriv = self.actDerivFahlman
        for layer in self:
            layer.minTarget, layer.minActivation = -0.5, -0.5
            layer.maxTarget, layer.maxActivation = 0.5, 0.5
python
def useFahlmanActivationFunction(self):
        """
        Change the network to use Fahlman's default activation function for
        all layers. Must be called after all layers have been added.
        """
        self.activationFunction = self.activationFunctionFahlman
        self.ACTPRIME = self.ACTPRIME_Fahlman
        self.actDeriv = self.actDerivFahlman
        for layer in self:
            layer.minTarget, layer.minActivation = -0.5, -0.5
            layer.maxTarget, layer.maxActivation = 0.5, 0.5
Change the network to use Fahlman's default activation function for all layers. Must be called after all layers have been added.
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L2197-L2207
Calysto/calysto
calysto/ai/conx.py
Network.backprop
def backprop(self, **args):
        """
        Computes error and wed for back propagation of error.
        """
        retval = self.compute_error(**args)
        if self.learning:
            self.compute_wed()
        return retval
python
def backprop(self, **args):
        """
        Computes error and wed for back propagation of error.
        """
        retval = self.compute_error(**args)
        if self.learning:
            self.compute_wed()
        return retval
Computes error and wed for back propagation of error.
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L2213-L2220
Calysto/calysto
calysto/ai/conx.py
Network.deltaWeight
def deltaWeight(self, e, wed, m, dweightLast, wedLast, w, n): """ e - learning rate wed - weight error delta vector (slope) m - momentum dweightLast - previous dweight vector (slope) wedLast - only used in quickprop; last weight error delta vector w - weight vector n - fan-in, number of connections coming in (counts bias, too) """ #print "WEIGHT = ", w shrinkFactor = self.mu / (1.0 + self.mu) if self.splitEpsilon: e /= float(n) if self._quickprop: nextStep = Numeric.zeros(len(dweightLast), 'f') for i in range(len(dweightLast)): s = wed[i] d = dweightLast[i] p = wedLast[i] #print self.mu #print "slopes[node] = %f QP w=%s d=%f s=%f p=%f eps=%f" % (s, w, d, s, p, e) #print type(nextStep[i]), nextStep[i] if (d > 0.0): if (s > 0.0): #print "CASE A1" nextStep[i] = nextStep[i] + (e * s) if (s >= (shrinkFactor * p)): #print "CASE B1" nextStep[i] = nextStep[i] + (self.mu * d) else: #print "CASE C1" nextStep[i] = nextStep[i] + (d * s / (p - s)) elif (d < 0.0): if (s < 0.0): #print "CASE A2" nextStep[i] = nextStep[i] + (e * s) if (s <= (shrinkFactor * p)): #print "CASE B2" nextStep[i] = nextStep[i] + (self.mu * d) else: #print "CASE C2" nextStep[i] = nextStep[i] + (d * s / (p - s)) else: #print "CASE D" nextStep[i] = nextStep[i] + (e * s + m * d) ## Last step was zero, so only use linear ## newDweight = nextStep #print "Next Step = ", nextStep[i] else: # backprop newDweight = e * wed + m * dweightLast # gradient descent return newDweight
python
def deltaWeight(self, e, wed, m, dweightLast, wedLast, w, n): """ e - learning rate wed - weight error delta vector (slope) m - momentum dweightLast - previous dweight vector (slope) wedLast - only used in quickprop; last weight error delta vector w - weight vector n - fan-in, number of connections coming in (counts bias, too) """ #print "WEIGHT = ", w shrinkFactor = self.mu / (1.0 + self.mu) if self.splitEpsilon: e /= float(n) if self._quickprop: nextStep = Numeric.zeros(len(dweightLast), 'f') for i in range(len(dweightLast)): s = wed[i] d = dweightLast[i] p = wedLast[i] #print self.mu #print "slopes[node] = %f QP w=%s d=%f s=%f p=%f eps=%f" % (s, w, d, s, p, e) #print type(nextStep[i]), nextStep[i] if (d > 0.0): if (s > 0.0): #print "CASE A1" nextStep[i] = nextStep[i] + (e * s) if (s >= (shrinkFactor * p)): #print "CASE B1" nextStep[i] = nextStep[i] + (self.mu * d) else: #print "CASE C1" nextStep[i] = nextStep[i] + (d * s / (p - s)) elif (d < 0.0): if (s < 0.0): #print "CASE A2" nextStep[i] = nextStep[i] + (e * s) if (s <= (shrinkFactor * p)): #print "CASE B2" nextStep[i] = nextStep[i] + (self.mu * d) else: #print "CASE C2" nextStep[i] = nextStep[i] + (d * s / (p - s)) else: #print "CASE D" nextStep[i] = nextStep[i] + (e * s + m * d) ## Last step was zero, so only use linear ## newDweight = nextStep #print "Next Step = ", nextStep[i] else: # backprop newDweight = e * wed + m * dweightLast # gradient descent return newDweight
e - learning rate wed - weight error delta vector (slope) m - momentum dweightLast - previous dweight vector (slope) wedLast - only used in quickprop; last weight error delta vector w - weight vector n - fan-in, number of connections coming in (counts bias, too)
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L2221-L2272
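In the non-quickprop branch the update is plain gradient descent with momentum, newDweight = e * wed + m * dweightLast; a worked instance with illustrative numbers:

e, m = 0.5, 0.9                    # learning rate and momentum (illustrative)
wed = [0.2, -0.1]                  # current slopes
dweightLast = [0.05, 0.0]          # previous weight changes
newDweight = [e * s + m * d for s, d in zip(wed, dweightLast)]
print(newDweight)                  # [0.145, -0.05]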
Calysto/calysto
calysto/ai/conx.py
Network.change_weights
def change_weights(self): """ Changes the weights according to the error values calculated during backprop(). Learning must be set. """ dw_count, dw_sum = 0, 0.0 if len(self.cacheLayers) != 0: changeLayers = self.cacheLayers else: changeLayers = self.layers for layer in changeLayers: if layer.active and layer.type != 'Input': if not layer.frozen: if self._quickprop or self.splitEpsilon: layer.dweight = self.deltaWeight(self.epsilon, layer.wed, self.momentum, layer.dweight, layer.wedLast, layer.weight, self.numConnects(layer.name)) else: layer.dweight = self.epsilon * layer.wed + self.momentum * layer.dweight layer.weight += layer.dweight #print "layer.wed = ",layer.wed #print "layer.weight = ",layer.weight," layer.dweight = ",layer.dweight layer.wedLast = Numeric.array(layer.wed) # make copy if self._quickprop: layer.wed = layer.weight * self.decay # reset to last weight, with decay else: layer.wed = layer.wed * 0.0 # keep same numeric type, just zero it dw_count += len(layer.dweight) dw_sum += Numeric.add.reduce(abs(layer.dweight)) if len(self.cacheConnections) != 0: changeConnections = self.cacheConnections else: changeConnections = self.connections for connection in changeConnections: if (connection.active and connection.fromLayer.active and connection.toLayer.active and not connection.frozen): toLayer = connection.toLayer if self._quickprop or self.splitEpsilon: # doing it one vector at a time, to match layer bias training (a quickprop abstraction) for i in range(len(connection.dweight)): Numeric.put(connection.dweight[i], Numeric.arange(len(connection.dweight[i])), self.deltaWeight(self.epsilon, connection.wed[i], self.momentum, connection.dweight[i], connection.wedLast[i], connection.weight[i], self.numConnects(connection.toLayer.name))) else: connection.dweight = self.epsilon * connection.wed + self.momentum * connection.dweight connection.weight += connection.dweight #print "connection.wed = ",connection.wed #print "connection.weight = ",connection.weight," connection.dweight = ",connection.dweight # reset values: connection.wedLast = Numeric.array(connection.wed) # make copy if self._quickprop: connection.wed = connection.weight * self.decay else: connection.wed = connection.wed * 0.0 # keeps the same Numeric type, but makes it zero # get some stats dw_count += Numeric.multiply.reduce(connection.dweight.shape) dw_sum += Numeric.add.reduce(Numeric.add.reduce(abs(connection.dweight))) if self.verbosity >= 1: print("WEIGHTS CHANGED") if self.verbosity > 2: self.display() return (dw_count, dw_sum)
python
def change_weights(self): """ Changes the weights according to the error values calculated during backprop(). Learning must be set. """ dw_count, dw_sum = 0, 0.0 if len(self.cacheLayers) != 0: changeLayers = self.cacheLayers else: changeLayers = self.layers for layer in changeLayers: if layer.active and layer.type != 'Input': if not layer.frozen: if self._quickprop or self.splitEpsilon: layer.dweight = self.deltaWeight(self.epsilon, layer.wed, self.momentum, layer.dweight, layer.wedLast, layer.weight, self.numConnects(layer.name)) else: layer.dweight = self.epsilon * layer.wed + self.momentum * layer.dweight layer.weight += layer.dweight #print "layer.wed = ",layer.wed #print "layer.weight = ",layer.weight," layer.dweight = ",layer.dweight layer.wedLast = Numeric.array(layer.wed) # make copy if self._quickprop: layer.wed = layer.weight * self.decay # reset to last weight, with decay else: layer.wed = layer.wed * 0.0 # keep same numeric type, just zero it dw_count += len(layer.dweight) dw_sum += Numeric.add.reduce(abs(layer.dweight)) if len(self.cacheConnections) != 0: changeConnections = self.cacheConnections else: changeConnections = self.connections for connection in changeConnections: if (connection.active and connection.fromLayer.active and connection.toLayer.active and not connection.frozen): toLayer = connection.toLayer if self._quickprop or self.splitEpsilon: # doing it one vector at a time, to match layer bias training (a quickprop abstraction) for i in range(len(connection.dweight)): Numeric.put(connection.dweight[i], Numeric.arange(len(connection.dweight[i])), self.deltaWeight(self.epsilon, connection.wed[i], self.momentum, connection.dweight[i], connection.wedLast[i], connection.weight[i], self.numConnects(connection.toLayer.name))) else: connection.dweight = self.epsilon * connection.wed + self.momentum * connection.dweight connection.weight += connection.dweight #print "connection.wed = ",connection.wed #print "connection.weight = ",connection.weight," connection.dweight = ",connection.dweight # reset values: connection.wedLast = Numeric.array(connection.wed) # make copy if self._quickprop: connection.wed = connection.weight * self.decay else: connection.wed = connection.wed * 0.0 # keeps the same Numeric type, but makes it zero # get some stats dw_count += Numeric.multiply.reduce(connection.dweight.shape) dw_sum += Numeric.add.reduce(Numeric.add.reduce(abs(connection.dweight))) if self.verbosity >= 1: print("WEIGHTS CHANGED") if self.verbosity > 2: self.display() return (dw_count, dw_sum)
Changes the weights according to the error values calculated during backprop(). Learning must be set.
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L2273-L2346
Calysto/calysto
calysto/ai/conx.py
Network.errorFunction
def errorFunction(self, t, a): """ Using a hyperbolic arctan on the error slightly exaggerates the actual error non-linearly. Return t - a to just use the difference. t - target vector a - activation vector """ def difference(v): if not self.hyperbolicError: #if -0.1 < v < 0.1: return 0.0 #else: return v else: if v < -0.9999999: return -17.0 elif v > 0.9999999: return 17.0 else: return math.log( (1.0 + v) / (1.0 - v) ) #else: return Numeric.arctanh(v) # half that above return list(map(difference, t - a))
python
def errorFunction(self, t, a): """ Using a hyperbolic arctan on the error slightly exaggerates the actual error non-linearly. Return t - a to just use the difference. t - target vector a - activation vector """ def difference(v): if not self.hyperbolicError: #if -0.1 < v < 0.1: return 0.0 #else: return v else: if v < -0.9999999: return -17.0 elif v > 0.9999999: return 17.0 else: return math.log( (1.0 + v) / (1.0 - v) ) #else: return Numeric.arctanh(v) # half that above return list(map(difference, t - a))
Using a hyperbolic arctan on the error slightly exaggerates the actual error non-linearly. Return t - a to just use the difference. t - target vector a - activation vector
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L2348-L2365
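With hyperbolicError set, each difference v = t - a is stretched by log((1 + v) / (1 - v)) (i.e. 2 * atanh(v)) and clipped to +/-17; a small numeric check of that mapping:

import math

def stretch(v):
    if v < -0.9999999: return -17.0
    elif v > 0.9999999: return 17.0
    else: return math.log((1.0 + v) / (1.0 - v))

for v in (0.1, 0.5, 0.9):
    print(v, "->", round(stretch(v), 3))   # 0.201, 1.099, 2.944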
Calysto/calysto
calysto/ai/conx.py
Network.ce_init
def ce_init(self): """ Initializes error computation. Calculates error for output layers and initializes hidden layer error to zero. """ retval = 0.0; correct = 0; totalCount = 0 for layer in self.layers: if layer.active: if layer.type == 'Output': layer.error = self.errorFunction(layer.target, layer.activation) totalCount += layer.size retval += Numeric.add.reduce((layer.target - layer.activation) ** 2) correct += Numeric.add.reduce(Numeric.fabs(layer.target - layer.activation) < self.tolerance) elif (layer.type == 'Hidden'): for i in range(layer.size): # do it this way so you don't break reference links layer.error[i] = 0.0 return (retval, correct, totalCount)
python
def ce_init(self): """ Initializes error computation. Calculates error for output layers and initializes hidden layer error to zero. """ retval = 0.0; correct = 0; totalCount = 0 for layer in self.layers: if layer.active: if layer.type == 'Output': layer.error = self.errorFunction(layer.target, layer.activation) totalCount += layer.size retval += Numeric.add.reduce((layer.target - layer.activation) ** 2) correct += Numeric.add.reduce(Numeric.fabs(layer.target - layer.activation) < self.tolerance) elif (layer.type == 'Hidden'): for i in range(layer.size): # do it this way so you don't break reference links layer.error[i] = 0.0 return (retval, correct, totalCount)
Initializes error computation. Calculates error for output layers and initializes hidden layer error to zero.
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L2367-L2383
Calysto/calysto
calysto/ai/conx.py
Network.compute_error
def compute_error(self, **args): """ Computes error for all non-output layers backwards through all projections. """ for key in args: layer = self.getLayer(key) if layer.kind == 'Output': self.copyTargets(layer, args[key]) self.verifyTargets() # better have targets set error, correct, total = self.ce_init() pcorrect = {} # go backwards through each proj but don't redo output errors! if len(self.cacheConnections) != 0: changeConnections = self.cacheConnections else: changeConnections = self.connections for connect in reverse(changeConnections): if connect.active and connect.toLayer.active and connect.fromLayer.active: connect.toLayer.delta = (connect.toLayer.error * (self.ACTPRIME(connect.toLayer.activation))) connect.fromLayer.error = connect.fromLayer.error + \ Numeric.matrixmultiply(connect.weight, connect.toLayer.delta) # now all errors are set on all layers! pcorrect = self.getLayerErrors() return (error, correct, total, pcorrect)
python
def compute_error(self, **args): """ Computes error for all non-output layers backwards through all projections. """ for key in args: layer = self.getLayer(key) if layer.kind == 'Output': self.copyTargets(layer, args[key]) self.verifyTargets() # better have targets set error, correct, total = self.ce_init() pcorrect = {} # go backwards through each proj but don't redo output errors! if len(self.cacheConnections) != 0: changeConnections = self.cacheConnections else: changeConnections = self.connections for connect in reverse(changeConnections): if connect.active and connect.toLayer.active and connect.fromLayer.active: connect.toLayer.delta = (connect.toLayer.error * (self.ACTPRIME(connect.toLayer.activation))) connect.fromLayer.error = connect.fromLayer.error + \ Numeric.matrixmultiply(connect.weight, connect.toLayer.delta) # now all errors are set on all layers! pcorrect = self.getLayerErrors() return (error, correct, total, pcorrect)
Computes error for all non-output layers backwards through all projections.
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L2384-L2409
Calysto/calysto
calysto/ai/conx.py
Network.compute_wed
def compute_wed(self): """ Computes weight error derivative for all connections in self.connections starting with the last connection. """ if len(self.cacheConnections) != 0: changeConnections = self.cacheConnections else: changeConnections = self.connections for connect in reverse(changeConnections): if connect.active and connect.fromLayer.active and connect.toLayer.active: connect.wed = connect.wed + Numeric.outerproduct(connect.fromLayer.activation, connect.toLayer.delta) if len(self.cacheLayers) != 0: changeLayers = self.cacheLayers else: changeLayers = self.layers for layer in changeLayers: if layer.active: layer.wed = layer.wed + layer.delta
python
def compute_wed(self): """ Computes weight error derivative for all connections in self.connections starting with the last connection. """ if len(self.cacheConnections) != 0: changeConnections = self.cacheConnections else: changeConnections = self.connections for connect in reverse(changeConnections): if connect.active and connect.fromLayer.active and connect.toLayer.active: connect.wed = connect.wed + Numeric.outerproduct(connect.fromLayer.activation, connect.toLayer.delta) if len(self.cacheLayers) != 0: changeLayers = self.cacheLayers else: changeLayers = self.layers for layer in changeLayers: if layer.active: layer.wed = layer.wed + layer.delta
Computes weight error derivative for all connections in self.connections starting with the last connection.
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L2438-L2457
Calysto/calysto
calysto/ai/conx.py
Network.toString
def toString(self):
        """
        Returns the network layers as a string.
        """
        output = ""
        for layer in reverse(self.layers):
            output += layer.toString()
        return output
python
def toString(self):
        """
        Returns the network layers as a string.
        """
        output = ""
        for layer in reverse(self.layers):
            output += layer.toString()
        return output
Returns the network layers as a string.
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L2469-L2476
Calysto/calysto
calysto/ai/conx.py
Network.display
def display(self): """ Displays the network to the screen. """ print("Display network '" + self.name + "':") size = list(range(len(self.layers))) size.reverse() for i in size: if self.layers[i].active: self.layers[i].display() if self.patterned and self.layers[i].type != 'Hidden': targetWord, diff = self.getWord( self.layers[i].target, returnDiff = 1) if self.layers[i].kind == 'Output': if targetWord == None: print("Target Pattern = %s" % "No match") else: if diff == 0.0: print("Target Pattern = '%s'; (exact)" % targetWord) else: print("Target Pattern = '%s'; Match difference = %f)" % (targetWord, diff)) actWord, diff = self.getWord( self.layers[i].activation, returnDiff = 1 ) if (self.layers[i].kind == 'Input' or self.layers[i].kind == 'Output'): if actWord == None: print("Matching Pattern = %s" % "No match") else: if diff == 0.0: print("Matching Pattern = '%s'; (exact)" % actWord) else: print("Matching Pattern = '%s'; Match difference = %f" % (actWord, diff)) if self.verbosity >= 1: weights = list(range(len(self.connections))) weights.reverse() for j in weights: if self.connections[j].toLayer.name == self.layers[i].name: self.connections[j].display()
python
def display(self): """ Displays the network to the screen. """ print("Display network '" + self.name + "':") size = list(range(len(self.layers))) size.reverse() for i in size: if self.layers[i].active: self.layers[i].display() if self.patterned and self.layers[i].type != 'Hidden': targetWord, diff = self.getWord( self.layers[i].target, returnDiff = 1) if self.layers[i].kind == 'Output': if targetWord == None: print("Target Pattern = %s" % "No match") else: if diff == 0.0: print("Target Pattern = '%s'; (exact)" % targetWord) else: print("Target Pattern = '%s'; Match difference = %f)" % (targetWord, diff)) actWord, diff = self.getWord( self.layers[i].activation, returnDiff = 1 ) if (self.layers[i].kind == 'Input' or self.layers[i].kind == 'Output'): if actWord == None: print("Matching Pattern = %s" % "No match") else: if diff == 0.0: print("Matching Pattern = '%s'; (exact)" % actWord) else: print("Matching Pattern = '%s'; Match difference = %f" % (actWord, diff)) if self.verbosity >= 1: weights = list(range(len(self.connections))) weights.reverse() for j in weights: if self.connections[j].toLayer.name == self.layers[i].name: self.connections[j].display()
Displays the network to the screen.
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L2544-L2578
Calysto/calysto
calysto/ai/conx.py
Network.arrayify
def arrayify(self):
        """
        Returns an array of node bias values and connection weights
        for use in a GA.
        """
        gene = []
        for layer in self.layers:
            if layer.type != 'Input':
                for i in range(layer.size):
                    gene.append( layer.weight[i] )
        for connection in self.connections:
            for i in range(connection.fromLayer.size):
                for j in range(connection.toLayer.size):
                    gene.append( connection.weight[i][j] )
        return gene
python
def arrayify(self):
        """
        Returns an array of node bias values and connection weights
        for use in a GA.
        """
        gene = []
        for layer in self.layers:
            if layer.type != 'Input':
                for i in range(layer.size):
                    gene.append( layer.weight[i] )
        for connection in self.connections:
            for i in range(connection.fromLayer.size):
                for j in range(connection.toLayer.size):
                    gene.append( connection.weight[i][j] )
        return gene
Returns an array of node bias values and connection weights for use in a GA.
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L2581-L2595
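A sketch of the GA round trip: flatten the network into a gene, perturb it, and write it back with unArrayify() (same ordering both ways: layer biases first, then connection weights).

import random

net = Network()
net.addLayers(2, 5, 1)
gene = net.arrayify()
gene = [w + random.gauss(0, 0.05) for w in gene]   # illustrative mutation
net.unArrayify(gene)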
Calysto/calysto
calysto/ai/conx.py
Network.unArrayify
def unArrayify(self, gene):
        """
        Copies gene bias values and weights to network bias values and weights.
        """
        g = 0 # if gene is too small an IndexError will be thrown
        for layer in self.layers:
            if layer.type != 'Input':
                for i in range(layer.size):
                    layer.weight[i] = float( gene[g])
                    g += 1
        for connection in self.connections:
            for i in range(connection.fromLayer.size):
                for j in range(connection.toLayer.size):
                    connection.weight[i][j] = gene[g]
                    g += 1
        # if gene is too long we may have a problem
        if len(gene) > g:
            raise IndexError('Argument to unArrayify is too long.', len(gene))
python
def unArrayify(self, gene):
        """
        Copies gene bias values and weights to network bias values and weights.
        """
        g = 0 # if gene is too small an IndexError will be thrown
        for layer in self.layers:
            if layer.type != 'Input':
                for i in range(layer.size):
                    layer.weight[i] = float( gene[g])
                    g += 1
        for connection in self.connections:
            for i in range(connection.fromLayer.size):
                for j in range(connection.toLayer.size):
                    connection.weight[i][j] = gene[g]
                    g += 1
        # if gene is too long we may have a problem
        if len(gene) > g:
            raise IndexError('Argument to unArrayify is too long.', len(gene))
Copies gene bias values and weights to network bias values and weights.
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L2596-L2615
Calysto/calysto
calysto/ai/conx.py
Network.saveWeightsToFile
def saveWeightsToFile(self, filename, mode='pickle', counter=None):
        """
        Deprecated. Use saveWeights instead.
        """
        self.saveWeights(filename, mode, counter)
python
def saveWeightsToFile(self, filename, mode='pickle', counter=None):
        """
        Deprecated. Use saveWeights instead.
        """
        self.saveWeights(filename, mode, counter)
Deprecated. Use saveWeights instead.
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L2635-L2639
Calysto/calysto
calysto/ai/conx.py
Network.saveWeights
def saveWeights(self, filename, mode='pickle', counter=None): """ Saves weights to file in pickle, plain, or tlearn mode. """ # modes: pickle/conx, plain, tlearn if "?" in filename: # replace ? pattern in filename with epoch number import re char = "?" match = re.search(re.escape(char) + "+", filename) if match: num = self.epoch if counter != None: num = counter elif self.totalEpoch != 0: # use a total epoch, if one: num = self.totalEpoch fstring = "%%0%dd" % len(match.group()) filename = filename[:match.start()] + \ fstring % self.epoch + \ filename[match.end():] self.lastAutoSaveWeightsFilename = filename if mode == 'pickle': mylist = self.arrayify() import pickle fp = open(filename, "w") pickle.dump(mylist, fp) fp.close() elif mode in ['plain', 'conx']: fp = open(filename, "w") fp.write("# Biases\n") for layer in self.layers: if layer.type != 'Input': fp.write("# Layer: " + layer.name + "\n") for i in range(layer.size): fp.write("%f " % layer.weight[i] ) fp.write("\n") fp.write("# Weights\n") for connection in self.connections: fp.write("# from " + connection.fromLayer.name + " to " + connection.toLayer.name + "\n") for i in range(connection.fromLayer.size): for j in range(connection.toLayer.size): fp.write("%f " % connection.weight[i][j] ) fp.write("\n") fp.close() elif mode == 'tlearn': fp = open(filename, "w") fp.write("NETWORK CONFIGURED BY TLEARN\n") fp.write("# weights after %d sweeps\n" % self.epoch) fp.write("# WEIGHTS\n") cnt = 1 for lto in self.layers: if lto.type != 'Input': for j in range(lto.size): fp.write("# TO NODE %d\n" % cnt) fp.write("%f\n" % lto.weight[j] ) for lfrom in self.layers: try: connection = self.getConnection(lfrom.name,lto.name) for i in range(connection.fromLayer.size): fp.write("%f\n" % connection.weight[i][j]) except NetworkError: # should return an exception here for i in range(lfrom.size): fp.write("%f\n" % 0.0) cnt += 1 fp.close() else: raise ValueError('Unknown mode in saveWeights().', mode)
python
def saveWeights(self, filename, mode='pickle', counter=None): """ Saves weights to file in pickle, plain, or tlearn mode. """ # modes: pickle/conx, plain, tlearn if "?" in filename: # replace ? pattern in filename with epoch number import re char = "?" match = re.search(re.escape(char) + "+", filename) if match: num = self.epoch if counter != None: num = counter elif self.totalEpoch != 0: # use a total epoch, if one: num = self.totalEpoch fstring = "%%0%dd" % len(match.group()) filename = filename[:match.start()] + \ fstring % self.epoch + \ filename[match.end():] self.lastAutoSaveWeightsFilename = filename if mode == 'pickle': mylist = self.arrayify() import pickle fp = open(filename, "w") pickle.dump(mylist, fp) fp.close() elif mode in ['plain', 'conx']: fp = open(filename, "w") fp.write("# Biases\n") for layer in self.layers: if layer.type != 'Input': fp.write("# Layer: " + layer.name + "\n") for i in range(layer.size): fp.write("%f " % layer.weight[i] ) fp.write("\n") fp.write("# Weights\n") for connection in self.connections: fp.write("# from " + connection.fromLayer.name + " to " + connection.toLayer.name + "\n") for i in range(connection.fromLayer.size): for j in range(connection.toLayer.size): fp.write("%f " % connection.weight[i][j] ) fp.write("\n") fp.close() elif mode == 'tlearn': fp = open(filename, "w") fp.write("NETWORK CONFIGURED BY TLEARN\n") fp.write("# weights after %d sweeps\n" % self.epoch) fp.write("# WEIGHTS\n") cnt = 1 for lto in self.layers: if lto.type != 'Input': for j in range(lto.size): fp.write("# TO NODE %d\n" % cnt) fp.write("%f\n" % lto.weight[j] ) for lfrom in self.layers: try: connection = self.getConnection(lfrom.name,lto.name) for i in range(connection.fromLayer.size): fp.write("%f\n" % connection.weight[i][j]) except NetworkError: # should return an exception here for i in range(lfrom.size): fp.write("%f\n" % 0.0) cnt += 1 fp.close() else: raise ValueError('Unknown mode in saveWeights().', mode)
Saves weights to file in pickle, plain, or tlearn mode.
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L2641-L2707
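A sketch of a save/restore round trip using the text ('conx'/'plain') format, which loadWeights() below reads back; a run of "?" in the filename would be expanded to a zero-padded epoch number.

net = Network()
net.addLayers(2, 5, 1)
net.saveWeights("xor.wts", mode="conx")
# ... later, on a network with the same topology:
net2 = Network()
net2.addLayers(2, 5, 1)
net2.loadWeights("xor.wts", mode="conx")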
Calysto/calysto
calysto/ai/conx.py
Network.loadWeights
def loadWeights(self, filename, mode='pickle'): """ Loads weights from a file in pickle, plain, or tlearn mode. """ # modes: pickle, plain/conx, tlearn if mode == 'pickle': import pickle fp = open(filename, "r") mylist = pickle.load(fp) fp.close() self.unArrayify(mylist) elif mode in ['plain', 'conx']: arr = [] fp = open(filename, "r") lines = fp.readlines() for line in lines: line = line.strip() if line == '' or line[0] == '#': pass else: data = list(map( float, line.split())) arr.extend( data ) self.unArrayify( arr ) fp.close() elif mode == 'tlearn': fp = open(filename, "r") fp.readline() # NETWORK CONFIGURED BY fp.readline() # # weights after %d sweeps fp.readline() # # WEIGHTS cnt = 1 for lto in self.layers: if lto.type != 'Input': for j in range(lto.size): fp.readline() # TO NODE %d lto.weight[j] = float(fp.readline()) for lfrom in self.layers: try: connection = self.getConnection(lfrom.name, lto.name) for i in range(connection.fromLayer.size): connection.weight[i][j] = float( fp.readline() ) except NetworkError: for i in range(lfrom.size): # 0.0 fp.readline() cnt += 1 fp.close() elif mode == 'nbench': # reads weights and constructs network fp = open(filename, "r") line = fp.readline() while line[:8] != "Ninputs:": line = fp.readline() itxt, inputs, ttxt, total, otxt, outputs = line.split() inputs, total, outputs = int(inputs), int(total), int(outputs) # cascor's total value is the bias + inputs + hiddens hiddens = total - inputs - 1 # create the layers: self.addLayer("input", inputs) for i in range(hiddens): self.addLayer("hidden%d" % i, 1) self.addLayer("output", outputs) # connect input to all hiddens, output: for i in range(hiddens): self.connect("input", "hidden%d" % i) self.connect("input", "output") # connect all hiddens to all later hiddens: for i in range(hiddens - 1): for j in range(i + 1, hiddens): if j != i: self.connect("hidden%d" % i, "hidden%d" % j ) # connect all hiddens to outputs: for i in range(hiddens): self.connect("hidden%d" % i, "output") # now, let's set the weights: for outcount in range(outputs): while line[:9] != "# Output:": line = fp.readline() line = fp.readline() # $ line = fp.readline() # bias, input to output, hidden to output? data = "" while line and line[0] != "#": data += " " + line.strip() line = fp.readline() weights = list(map(float, data.split())) self["output"].weight[outcount] = weights[0] # bias next = 1 for i in range(self["input"].size): self["input", "output"].weight[i][outcount] = weights[next] next += 1 for h in range(hiddens): for i in range(self["hidden%d" % h].size): # normally just 1 self["hidden%d" % h, "output"].weight[i][outcount] = weights[next] next += 1 # now, for each hidden "layer": while line and line[0] != "$": line = fp.readline() line = fp.readline() for hidcount in range(hiddens): weights = [] while line and line[0] != "$" and line[0] != "#": # next line is a weight line weights.extend( list(map(float, line.split()))) # bias, input to hidden, hidden to hidden? 
line = fp.readline() self[("hidden%d" % hidcount)].weight[0] = weights[0] # bias first next = 1 for i in range(self["input"].size): for j in range(self["hidden%d" % hidcount].size): # normally just 1 self["input", ("hidden%d" % hidcount)].weight[i][j] = weights[next] next += 1 for h in range(hidcount): # all those hids leading up to this one for i in range(self["hidden%d" % h].size): # normally just 1 for j in range(self["hidden%d" % hidcount].size): # normally just 1 self[("hidden%d" % h), ("hidden%d" % hidcount)].weight[i][j] = weights[next] next += 1 line = fp.readline() # $ line = fp.readline() # beginning of weights else: raise ValueError('Unknown mode in loadWeights()', mode)
python
def loadWeights(self, filename, mode='pickle'): """ Loads weights from a file in pickle, plain, or tlearn mode. """ # modes: pickle, plain/conx, tlearn if mode == 'pickle': import pickle fp = open(filename, "r") mylist = pickle.load(fp) fp.close() self.unArrayify(mylist) elif mode in ['plain', 'conx']: arr = [] fp = open(filename, "r") lines = fp.readlines() for line in lines: line = line.strip() if line == '' or line[0] == '#': pass else: data = list(map( float, line.split())) arr.extend( data ) self.unArrayify( arr ) fp.close() elif mode == 'tlearn': fp = open(filename, "r") fp.readline() # NETWORK CONFIGURED BY fp.readline() # # weights after %d sweeps fp.readline() # # WEIGHTS cnt = 1 for lto in self.layers: if lto.type != 'Input': for j in range(lto.size): fp.readline() # TO NODE %d lto.weight[j] = float(fp.readline()) for lfrom in self.layers: try: connection = self.getConnection(lfrom.name, lto.name) for i in range(connection.fromLayer.size): connection.weight[i][j] = float( fp.readline() ) except NetworkError: for i in range(lfrom.size): # 0.0 fp.readline() cnt += 1 fp.close() elif mode == 'nbench': # reads weights and constructs network fp = open(filename, "r") line = fp.readline() while line[:8] != "Ninputs:": line = fp.readline() itxt, inputs, ttxt, total, otxt, outputs = line.split() inputs, total, outputs = int(inputs), int(total), int(outputs) # cascor's total value is the bias + inputs + hiddens hiddens = total - inputs - 1 # create the layers: self.addLayer("input", inputs) for i in range(hiddens): self.addLayer("hidden%d" % i, 1) self.addLayer("output", outputs) # connect input to all hiddens, output: for i in range(hiddens): self.connect("input", "hidden%d" % i) self.connect("input", "output") # connect all hiddens to all later hiddens: for i in range(hiddens - 1): for j in range(i + 1, hiddens): if j != i: self.connect("hidden%d" % i, "hidden%d" % j ) # connect all hiddens to outputs: for i in range(hiddens): self.connect("hidden%d" % i, "output") # now, let's set the weights: for outcount in range(outputs): while line[:9] != "# Output:": line = fp.readline() line = fp.readline() # $ line = fp.readline() # bias, input to output, hidden to output? data = "" while line and line[0] != "#": data += " " + line.strip() line = fp.readline() weights = list(map(float, data.split())) self["output"].weight[outcount] = weights[0] # bias next = 1 for i in range(self["input"].size): self["input", "output"].weight[i][outcount] = weights[next] next += 1 for h in range(hiddens): for i in range(self["hidden%d" % h].size): # normally just 1 self["hidden%d" % h, "output"].weight[i][outcount] = weights[next] next += 1 # now, for each hidden "layer": while line and line[0] != "$": line = fp.readline() line = fp.readline() for hidcount in range(hiddens): weights = [] while line and line[0] != "$" and line[0] != "#": # next line is a weight line weights.extend( list(map(float, line.split()))) # bias, input to hidden, hidden to hidden? 
line = fp.readline() self[("hidden%d" % hidcount)].weight[0] = weights[0] # bias first next = 1 for i in range(self["input"].size): for j in range(self["hidden%d" % hidcount].size): # normally just 1 self["input", ("hidden%d" % hidcount)].weight[i][j] = weights[next] next += 1 for h in range(hidcount): # all those hids leading up to this one for i in range(self["hidden%d" % h].size): # normally just 1 for j in range(self["hidden%d" % hidcount].size): # normally just 1 self[("hidden%d" % h), ("hidden%d" % hidcount)].weight[i][j] = weights[next] next += 1 line = fp.readline() # $ line = fp.readline() # beginning of weights else: raise ValueError('Unknown mode in loadWeights()', mode)
Loads weights from a file in pickle, plain, or tlearn mode.
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L2715-L2831
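A minimal usage sketch for the plain/"conx" weight format follows; the file name, its contents, and the already-built Network instance `net` are assumptions for illustration, not part of the record above.

# Hedged sketch: in mode="plain"/"conx", loadWeights() simply reads every
# number in the file (skipping blank and "#" comment lines) and restores
# them through unArrayify(), so a hand-edited file can use any line breaks:
#
#   weights.wts (hypothetical):
#     # biases, then connection weights
#     0.1 -0.3 0.25
#     0.9 -1.2 0.4 0.7
net.loadWeights("weights.wts", mode="conx")
# mode="pickle" unpickles a saved weight list, mode="tlearn" parses tlearn
# .wts output, and mode="nbench" rebuilds a cascade-correlation network
# from a CMU benchmark file.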
Calysto/calysto
calysto/ai/conx.py
Network.saveNetwork
def saveNetwork(self, filename, makeWrapper = 1, mode = "pickle", counter = None):
        """
        Saves network to file using pickle.
        """
        self.saveNetworkToFile(filename, makeWrapper, mode, counter)
python
def saveNetwork(self, filename, makeWrapper = 1, mode = "pickle", counter = None):
        """
        Saves network to file using pickle.
        """
        self.saveNetworkToFile(filename, makeWrapper, mode, counter)
Saves network to file using pickle.
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L2833-L2837
Calysto/calysto
calysto/ai/conx.py
Network.saveNetworkToFile
def saveNetworkToFile(self, filename, makeWrapper = 1, mode = "pickle", counter = None): """ Deprecated. """ if "?" in filename: # replace ? pattern in filename with epoch number import re char = "?" match = re.search(re.escape(char) + "+", filename) if match: num = self.epoch if counter != None: num = counter elif self.totalEpoch != 0: # use a total epoch, if one: num = self.totalEpoch fstring = "%%0%dd" % len(match.group()) filename = filename[:match.start()] + \ fstring % num + \ filename[match.end():] self.lastAutoSaveNetworkFilename = filename if mode == "pickle": # dump network via pickle: import pickle basename = filename.split('.')[0] filename += ".pickle" fp = open(filename, 'w') pickle.dump(self, fp) fp.close() # make wrapper python file: if makeWrapper: fp = open(basename + ".py", "w") fp.write("from pyrobot.brain.conx import *\n") fp.write("import pickle\n") fp.write("fp = open('%s', 'r')\n" % filename) fp.write("network = pickle.load(fp)") fp.close() # give some help: print("To load network:") print(" %% python -i %s " % (basename + ".py")) print(" >>> network.train() # for example") print("--- OR ---") print(" % python") print(" >>> from pyrobot.brain.conx import *") print(" >>> network = loadNetwork(%s)" % filename) print(" >>> network.train() # for example") elif mode in ["plain", "conx"]: fp = open(filename, "w") fp.write("network, %s\n" % (self.__class__.__name__)) for layer in self.layers: fp.write("layer, %s, %s\n" % (layer.name, layer.size)) # biases: for i in range(layer.size): fp.write("%f " % layer.weight[i]) fp.write("\n") for connection in self.connections: fp.write("connection, %s, %s\n" %(connection.fromLayer.name, connection.toLayer.name)) # weights: for i in range(connection.fromLayer.size): for j in range(connection.toLayer.size): fp.write("%f " % connection.weight[i][j]) fp.write("\n") fp.close()
python
def saveNetworkToFile(self, filename, makeWrapper = 1, mode = "pickle", counter = None): """ Deprecated. """ if "?" in filename: # replace ? pattern in filename with epoch number import re char = "?" match = re.search(re.escape(char) + "+", filename) if match: num = self.epoch if counter != None: num = counter elif self.totalEpoch != 0: # use a total epoch, if one: num = self.totalEpoch fstring = "%%0%dd" % len(match.group()) filename = filename[:match.start()] + \ fstring % num + \ filename[match.end():] self.lastAutoSaveNetworkFilename = filename if mode == "pickle": # dump network via pickle: import pickle basename = filename.split('.')[0] filename += ".pickle" fp = open(filename, 'w') pickle.dump(self, fp) fp.close() # make wrapper python file: if makeWrapper: fp = open(basename + ".py", "w") fp.write("from pyrobot.brain.conx import *\n") fp.write("import pickle\n") fp.write("fp = open('%s', 'r')\n" % filename) fp.write("network = pickle.load(fp)") fp.close() # give some help: print("To load network:") print(" %% python -i %s " % (basename + ".py")) print(" >>> network.train() # for example") print("--- OR ---") print(" % python") print(" >>> from pyrobot.brain.conx import *") print(" >>> network = loadNetwork(%s)" % filename) print(" >>> network.train() # for example") elif mode in ["plain", "conx"]: fp = open(filename, "w") fp.write("network, %s\n" % (self.__class__.__name__)) for layer in self.layers: fp.write("layer, %s, %s\n" % (layer.name, layer.size)) # biases: for i in range(layer.size): fp.write("%f " % layer.weight[i]) fp.write("\n") for connection in self.connections: fp.write("connection, %s, %s\n" %(connection.fromLayer.name, connection.toLayer.name)) # weights: for i in range(connection.fromLayer.size): for j in range(connection.toLayer.size): fp.write("%f " % connection.weight[i][j]) fp.write("\n") fp.close()
Deprecated.
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L2839-L2899
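The "?" substitution in the filename is easy to miss; here is a hedged sketch of it, assuming a network instance `net` that has just finished epoch 12.

# Hedged sketch: a run of "?" in the filename is replaced by a zero-padded
# counter (an explicit counter argument wins over self.epoch/self.totalEpoch):
net.saveNetworkToFile("xor-????.wts", makeWrapper=0, mode="conx", counter=12)
# -> writes "xor-0012.wts" in the line-oriented plain/conx format:
#      network, <class name>
#      layer, input, 2
#      <bias values>
#      connection, input, hidden
#      <weight values>
# mode="pickle" instead appends ".pickle" and can also write a small
# wrapper .py file that reloads the pickled network.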
Calysto/calysto
calysto/ai/conx.py
Network.loadVectors
def loadVectors(self, filename, cols = None, everyNrows = 1, delim = ' ', checkEven = 1, patterned = 0):
        """
        Load a set of vectors from a file. Takes a filename, a list of the
        cols you want (or None for all), how often to keep a row via
        everyNrows (1 for every row), and a delimiter.
        """
        return self.loadVectorsFromFile(filename, cols, everyNrows, delim, checkEven, patterned)
python
def loadVectors(self, filename, cols = None, everyNrows = 1, delim = ' ', checkEven = 1, patterned = 0):
        """
        Load a set of vectors from a file. Takes a filename, a list of the
        cols you want (or None for all), how often to keep a row via
        everyNrows (1 for every row), and a delimiter.
        """
        return self.loadVectorsFromFile(filename, cols, everyNrows, delim, checkEven, patterned)
Load a set of vectors from a file. Takes a filename, a list of the cols you want (or None for all), how often to keep a row via everyNrows (1 for every row), and a delimiter.
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L2901-L2909
Calysto/calysto
calysto/ai/conx.py
Network.loadVectorsFromFile
def loadVectorsFromFile(self, filename, cols = None, everyNrows = 1, delim = ' ', checkEven = 1, patterned = 0): """ Deprecated. """ fp = open(filename, "r") line = fp.readline() lineno = 0 lastLength = None data = [] while line: if lineno % everyNrows == 0: if patterned: linedata1 = [x for x in line.strip().split(delim)] else: linedata1 = [float(x) for x in line.strip().split(delim)] else: lineno += 1 line = fp.readline() continue if cols == None: # get em all newdata = linedata1 else: # just get some cols newdata = [] for i in cols: newdata.append( linedata1[i] ) if lastLength == None or (not checkEven) or (checkEven and len(newdata) == lastLength): data.append( newdata ) else: raise ValueError("Data Format Error: line = %d, data = %s" % (lineno, newdata)) lastLength = len(newdata) lineno += 1 line = fp.readline() fp.close() return data
python
def loadVectorsFromFile(self, filename, cols = None, everyNrows = 1, delim = ' ', checkEven = 1, patterned = 0): """ Deprecated. """ fp = open(filename, "r") line = fp.readline() lineno = 0 lastLength = None data = [] while line: if lineno % everyNrows == 0: if patterned: linedata1 = [x for x in line.strip().split(delim)] else: linedata1 = [float(x) for x in line.strip().split(delim)] else: lineno += 1 line = fp.readline() continue if cols == None: # get em all newdata = linedata1 else: # just get some cols newdata = [] for i in cols: newdata.append( linedata1[i] ) if lastLength == None or (not checkEven) or (checkEven and len(newdata) == lastLength): data.append( newdata ) else: raise ValueError("Data Format Error: line = %d, data = %s" % (lineno, newdata)) lastLength = len(newdata) lineno += 1 line = fp.readline() fp.close() return data
Deprecated.
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L2911-L2945
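A hedged sketch of the expected vector-file layout and the column selection; the file name, its contents, and the Network instance `net` are illustrative assumptions.

#   samples.dat (hypothetical), one whitespace-delimited row per vector:
#     0.0 0.0 1.0
#     0.1 0.9 0.0
#     0.2 0.8 1.0
data = net.loadVectors("samples.dat", cols=[0, 2], everyNrows=1, delim=' ')
# keeps only columns 0 and 2 of every row:
# data == [[0.0, 1.0], [0.1, 0.0], [0.2, 1.0]]
# checkEven=1 (the default) raises ValueError if a row changes length.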
Calysto/calysto
calysto/ai/conx.py
Network.loadInputPatterns
def loadInputPatterns(self, filename, cols = None, everyNrows = 1, delim = ' ', checkEven = 1):
        """
        Loads inputs as patterns from file.
        """
        self.loadInputPatternsFromFile(filename, cols, everyNrows, delim, checkEven)
python
def loadInputPatterns(self, filename, cols = None, everyNrows = 1, delim = ' ', checkEven = 1):
        """
        Loads inputs as patterns from file.
        """
        self.loadInputPatternsFromFile(filename, cols, everyNrows, delim, checkEven)
Loads inputs as patterns from file.
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L2947-L2953
Calysto/calysto
calysto/ai/conx.py
Network.loadInputPatternsFromFile
def loadInputPatternsFromFile(self, filename, cols = None, everyNrows = 1, delim = ' ', checkEven = 1):
        """
        Deprecated.
        """
        self.inputs = self.loadVectors(filename, cols, everyNrows, delim, checkEven, patterned = 1)
        self.loadOrder = [0] * len(self.inputs)
        for i in range(len(self.inputs)):
            self.loadOrder[i] = i
python
def loadInputPatternsFromFile(self, filename, cols = None, everyNrows = 1, delim = ' ', checkEven = 1):
        """
        Deprecated.
        """
        self.inputs = self.loadVectors(filename, cols, everyNrows, delim, checkEven, patterned = 1)
        self.loadOrder = [0] * len(self.inputs)
        for i in range(len(self.inputs)):
            self.loadOrder[i] = i
Deprecated.
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L2955-L2963
Calysto/calysto
calysto/ai/conx.py
Network.loadInputs
def loadInputs(self, filename, cols = None, everyNrows = 1, delim = ' ', checkEven = 1):
        """
        Loads inputs from file. Patterning is lost.
        """
        self.loadInputsFromFile(filename, cols, everyNrows, delim, checkEven)
python
def loadInputs(self, filename, cols = None, everyNrows = 1, delim = ' ', checkEven = 1):
        """
        Loads inputs from file. Patterning is lost.
        """
        self.loadInputsFromFile(filename, cols, everyNrows, delim, checkEven)
Loads inputs from file. Patterning is lost.
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L2965-L2971
Calysto/calysto
calysto/ai/conx.py
Network.saveInputsToFile
def saveInputsToFile(self, filename):
        """
        Deprecated.
        """
        fp = open(filename, 'w')
        for input in self.inputs:
            vec = self.replacePatterns(input)
            for item in vec:
                fp.write("%f " % item)
            fp.write("\n")
python
def saveInputsToFile(self, filename):
        """
        Deprecated.
        """
        fp = open(filename, 'w')
        for input in self.inputs:
            vec = self.replacePatterns(input)
            for item in vec:
                fp.write("%f " % item)
            fp.write("\n")
Deprecated.
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L2989-L2998
Calysto/calysto
calysto/ai/conx.py
Network.loadTargets
def loadTargets(self, filename, cols = None, everyNrows = 1, delim = ' ', checkEven = 1):
        """
        Loads targets from file.
        """
        self.loadTargetsFromFile(filename, cols, everyNrows, delim, checkEven)
python
def loadTargets(self, filename, cols = None, everyNrows = 1, delim = ' ', checkEven = 1):
        """
        Loads targets from file.
        """
        self.loadTargetsFromFile(filename, cols, everyNrows, delim, checkEven)
Loads targets from file.
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L3000-L3006
Calysto/calysto
calysto/ai/conx.py
Network.loadTargetsFromFile
def loadTargetsFromFile(self, filename, cols = None, everyNrows = 1, delim = ' ', checkEven = 1):
        """
        Loads targets from file.
        """
        self.targets = self.loadVectors(filename, cols, everyNrows, delim, checkEven)
python
def loadTargetsFromFile(self, filename, cols = None, everyNrows = 1, delim = ' ', checkEven = 1):
        """
        Loads targets from file.
        """
        self.targets = self.loadVectors(filename, cols, everyNrows, delim, checkEven)
Loads targets from file.
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L3008-L3014
Calysto/calysto
calysto/ai/conx.py
Network.loadTargetPatterns
def loadTargetPatterns(self, filename, cols = None, everyNrows = 1, delim = ' ', checkEven = 1):
        """
        Loads targets as patterns from file.
        """
        self.loadTargetPatternsFromFile(filename, cols, everyNrows, delim, checkEven)
python
def loadTargetPatterns(self, filename, cols = None, everyNrows = 1, delim = ' ', checkEven = 1):
        """
        Loads targets as patterns from file.
        """
        self.loadTargetPatternsFromFile(filename, cols, everyNrows, delim, checkEven)
Loads targets as patterns from file.
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L3016-L3022
Calysto/calysto
calysto/ai/conx.py
Network.loadTargetPatternsFromFile
def loadTargetPatternsFromFile(self, filename, cols = None, everyNrows = 1, delim = ' ', checkEven = 1):
        """
        Deprecated.
        """
        self.targets = self.loadVectors(filename, cols, everyNrows, delim, checkEven, patterned=1)
python
def loadTargetPatternsFromFile(self, filename, cols = None, everyNrows = 1, delim = ' ', checkEven = 1):
        """
        Deprecated.
        """
        self.targets = self.loadVectors(filename, cols, everyNrows, delim, checkEven, patterned=1)
Deprecated.
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L3024-L3030
Calysto/calysto
calysto/ai/conx.py
Network.saveTargetsToFile
def saveTargetsToFile(self, filename):
        """
        Deprecated.
        """
        fp = open(filename, 'w')
        for target in self.targets:
            vec = self.replacePatterns(target)
            for item in vec:
                fp.write("%f " % item)
            fp.write("\n")
python
def saveTargetsToFile(self, filename):
        """
        Deprecated.
        """
        fp = open(filename, 'w')
        for target in self.targets:
            vec = self.replacePatterns(target)
            for item in vec:
                fp.write("%f " % item)
            fp.write("\n")
Deprecated.
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L3038-L3047
Calysto/calysto
calysto/ai/conx.py
Network.saveDataToFile
def saveDataToFile(self, filename): """ Deprecated. """ fp = open(filename, 'w') for i in range(len(self.inputs)): try: vec = self.replacePatterns(self.inputs[i]) for item in vec: fp.write("%f " % item) except: pass try: vec = self.replacePatterns(self.targets[i]) for item in vec: fp.write("%f " % item) except: pass fp.write("\n")
python
def saveDataToFile(self, filename): """ Deprecated. """ fp = open(filename, 'w') for i in range(len(self.inputs)): try: vec = self.replacePatterns(self.inputs[i]) for item in vec: fp.write("%f " % item) except: pass try: vec = self.replacePatterns(self.targets[i]) for item in vec: fp.write("%f " % item) except: pass fp.write("\n")
Deprecated.
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L3055-L3073
Calysto/calysto
calysto/ai/conx.py
Network.loadDataFromFile
def loadDataFromFile(self, filename, ocnt = -1): """ Deprecated. """ if ocnt == -1: ocnt = int(self.layers[len(self.layers) - 1].size) fp = open(filename, 'r') line = fp.readline() self.targets = [] self.inputs = [] while line: data = list(map(float, line.split())) cnt = len(data) icnt = cnt - ocnt self.inputs.append(self.patternVector(data[0:icnt])) self.targets.append(self.patternVector(data[icnt:])) line = fp.readline() self.loadOrder = [0] * len(self.inputs) for i in range(len(self.inputs)): self.loadOrder[i] = i
python
def loadDataFromFile(self, filename, ocnt = -1): """ Deprecated. """ if ocnt == -1: ocnt = int(self.layers[len(self.layers) - 1].size) fp = open(filename, 'r') line = fp.readline() self.targets = [] self.inputs = [] while line: data = list(map(float, line.split())) cnt = len(data) icnt = cnt - ocnt self.inputs.append(self.patternVector(data[0:icnt])) self.targets.append(self.patternVector(data[icnt:])) line = fp.readline() self.loadOrder = [0] * len(self.inputs) for i in range(len(self.inputs)): self.loadOrder[i] = i
Deprecated.
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L3081-L3100
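A hedged sketch of the combined input+target file that loadDataFromFile() expects; the file name, its contents, and the network `net` (2 input units, 1 output unit) are assumptions for the example.

#   xor.dat (hypothetical), inputs first, targets last on every line:
#     0 0 0
#     0 1 1
#     1 0 1
#     1 1 0
net.loadDataFromFile("xor.dat", ocnt=1)   # ocnt = number of target columns
# net.inputs  -> [[0.0, 0.0], [0.0, 1.0], [1.0, 0.0], [1.0, 1.0]]
# net.targets -> [[0.0], [1.0], [1.0], [0.0]]
# with ocnt=-1 (the default) the size of the last layer is used instead.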
Calysto/calysto
calysto/ai/conx.py
Network.lookupPattern
def lookupPattern(self, name, layer):
        """
        See if there is a name/layer pattern combo, else return
        the name pattern.
        """
        if (name, layer) in self.patterns:
            return self.patterns[(name, layer)]
        else:
            return self.patterns[name]
python
def lookupPattern(self, name, layer):
        """
        See if there is a name/layer pattern combo, else return
        the name pattern.
        """
        if (name, layer) in self.patterns:
            return self.patterns[(name, layer)]
        else:
            return self.patterns[name]
See if there is a name/layer pattern combo, else return the name pattern.
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L3103-L3108
Calysto/calysto
calysto/ai/conx.py
Network.replacePatterns
def replacePatterns(self, vector, layer = None): """ Replaces patterned inputs or targets with activation vectors. """ if not self.patterned: return vector if type(vector) == str: return self.replacePatterns(self.lookupPattern(vector, layer), layer) elif type(vector) != list: return vector # should be a vector if we made it here vec = [] for v in vector: if type(v) == str: retval = self.replacePatterns(self.lookupPattern(v, layer), layer) if type(retval) == list: vec.extend( retval ) else: vec.append( retval ) else: vec.append( v ) return vec
python
def replacePatterns(self, vector, layer = None): """ Replaces patterned inputs or targets with activation vectors. """ if not self.patterned: return vector if type(vector) == str: return self.replacePatterns(self.lookupPattern(vector, layer), layer) elif type(vector) != list: return vector # should be a vector if we made it here vec = [] for v in vector: if type(v) == str: retval = self.replacePatterns(self.lookupPattern(v, layer), layer) if type(retval) == list: vec.extend( retval ) else: vec.append( retval ) else: vec.append( v ) return vec
Replaces patterned inputs or targets with activation vectors.
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L3109-L3129
Calysto/calysto
calysto/ai/conx.py
Network.patternVector
def patternVector(self, vector): """ Replaces vector with patterns. Used for loading inputs or targets from a file and still preserving patterns. """ if not self.patterned: return vector if type(vector) == int: if self.getWord(vector) != '': return self.getWord(vector) else: return vector elif type(vector) == float: if self.getWord(vector) != '': return self.getWord(vector) else: return vector elif type(vector) == str: return vector elif type(vector) == list: if self.getWord(vector) != '': return self.getWord(vector) # should be a list vec = [] for v in vector: if self.getWord(v) != '': retval = self.getWord(v) vec.append( retval ) else: retval = self.patternVector(v) vec.append( retval ) return vec
python
def patternVector(self, vector): """ Replaces vector with patterns. Used for loading inputs or targets from a file and still preserving patterns. """ if not self.patterned: return vector if type(vector) == int: if self.getWord(vector) != '': return self.getWord(vector) else: return vector elif type(vector) == float: if self.getWord(vector) != '': return self.getWord(vector) else: return vector elif type(vector) == str: return vector elif type(vector) == list: if self.getWord(vector) != '': return self.getWord(vector) # should be a list vec = [] for v in vector: if self.getWord(v) != '': retval = self.getWord(v) vec.append( retval ) else: retval = self.patternVector(v) vec.append( retval ) return vec
Replaces vector with patterns. Used for loading inputs or targets from a file and still preserving patterns.
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L3130-L3160
Calysto/calysto
calysto/ai/conx.py
Network.getPattern
def getPattern(self, word):
        """
        Returns the pattern with key word. Example:
        net.getPattern("tom") => [0, 0, 0, 1]
        """
        if word in self.patterns:
            return self.patterns[word]
        else:
            raise ValueError('Unknown pattern in getPattern().', word)
python
def getPattern(self, word):
        """
        Returns the pattern with key word. Example:
        net.getPattern("tom") => [0, 0, 0, 1]
        """
        if word in self.patterns:
            return self.patterns[word]
        else:
            raise ValueError('Unknown pattern in getPattern().', word)
Returns the pattern with key word. Example: net.getPattern("tom") => [0, 0, 0, 1]
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L3172-L3182
Calysto/calysto
calysto/ai/conx.py
Network.getWord
def getWord(self, pattern, returnDiff = 0): """ Returns the word associated with pattern. Example: net.getWord([0, 0, 0, 1]) => "tom" This method now returns the closest pattern based on distance. """ minDist = 10000 closest = None for w in self.patterns: # There may be some patterns that are scalars; we don't search # those in this function: if type(self.patterns[w]) in [int, float, int]: continue if len(self.patterns[w]) == len(pattern): dist = reduce(operator.add, [(a - b) ** 2 for (a,b) in zip(self.patterns[w], pattern )]) if dist == 0.0: if returnDiff: return w, dist else: return w if dist < minDist: minDist = dist closest = w if returnDiff: return closest, minDist else: return closest
python
def getWord(self, pattern, returnDiff = 0): """ Returns the word associated with pattern. Example: net.getWord([0, 0, 0, 1]) => "tom" This method now returns the closest pattern based on distance. """ minDist = 10000 closest = None for w in self.patterns: # There may be some patterns that are scalars; we don't search # those in this function: if type(self.patterns[w]) in [int, float, int]: continue if len(self.patterns[w]) == len(pattern): dist = reduce(operator.add, [(a - b) ** 2 for (a,b) in zip(self.patterns[w], pattern )]) if dist == 0.0: if returnDiff: return w, dist else: return w if dist < minDist: minDist = dist closest = w if returnDiff: return closest, minDist else: return closest
Returns the word associated with pattern. Example: net.getWord([0, 0, 0, 1]) => "tom" This method now returns the closest pattern based on distance.
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L3183-L3210
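A hedged round-trip through the pattern table, assuming a conx Network instance `net`:

net.addPattern("tom", [0, 0, 0, 1])
net.addPattern("sue", [0, 0, 1, 0])
net.getPattern("tom")                 # -> [0, 0, 0, 1]
net.getWord([0, 0, 0, 1])             # exact match -> "tom"
# getWord() otherwise returns the stored pattern with the smallest
# squared distance:
net.getWord([0.1, 0.0, 0.2, 0.9])                # -> "tom"
net.getWord([0.1, 0.0, 0.2, 0.9], returnDiff=1)  # -> ("tom", ~0.06)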
Calysto/calysto
calysto/ai/conx.py
Network.addPattern
def addPattern(self, word, vector):
        """
        Adds a pattern with key word. Example:
        net.addPattern("tom", [0, 0, 0, 1])
        """
        if word in self.patterns:
            raise NetworkError('Pattern key already in use. Call delPattern to free key.', word)
        else:
            self.patterns[word] = vector
python
def addPattern(self, word, vector):
        """
        Adds a pattern with key word. Example:
        net.addPattern("tom", [0, 0, 0, 1])
        """
        if word in self.patterns:
            raise NetworkError('Pattern key already in use. Call delPattern to free key.', word)
        else:
            self.patterns[word] = vector
Adds a pattern with key word. Example: net.addPattern("tom", [0, 0, 0, 1])
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L3221-L3231
Calysto/calysto
calysto/ai/conx.py
Network.compare
def compare(self, v1, v2):
        """
        Compares two values. Returns 1 if all values are within
        self.tolerance of each other.
        """
        try:
            if len(v1) != len(v2):
                return 0
            for x, y in zip(v1, v2):
                if abs( x - y ) > self.tolerance:
                    return 0
            return 1
        except: # some patterns may not be vectors
            try:
                if abs( v1 - v2 ) > self.tolerance:
                    return 0
                else:
                    return 1
            except:
                return 0
python
def compare(self, v1, v2):
        """
        Compares two values. Returns 1 if all values are within
        self.tolerance of each other.
        """
        try:
            if len(v1) != len(v2):
                return 0
            for x, y in zip(v1, v2):
                if abs( x - y ) > self.tolerance:
                    return 0
            return 1
        except: # some patterns may not be vectors
            try:
                if abs( v1 - v2 ) > self.tolerance:
                    return 0
                else:
                    return 1
            except:
                return 0
Compares two values. Returns 1 if all values are within self.tolerance of each other.
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L3241-L3260
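A hedged sketch of the tolerance-based comparison, assuming a Network instance `net`:

net.tolerance = 0.4
net.compare([0.9, 0.1], [1.0, 0.0])   # -> 1, every element within 0.4
net.compare([0.5, 0.1], [1.0, 0.0])   # -> 0, first element is 0.5 away
net.compare(0.95, 1.0)                # plain scalars also work -> 1
net.compare([0.9], [1.0, 0.0])        # length mismatch -> 0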
Calysto/calysto
calysto/ai/conx.py
Network.shareWeights
def shareWeights(self, network, listOfLayerNamePairs = None): """ Share weights with another network. Connection is broken after a randomize or change of size. Layers must have the same names and sizes for shared connections in both networks. Example: net.shareWeights(otherNet, [["hidden", "output"]]) This example will take the weights between the hidden and output layers of otherNet and share them with net. Also, the bias values of otherNet["output"] will be shared with net["output"]. If no list is given, will share all weights. """ if listOfLayerNamePairs == None: listOfLayerNamePairs = [] for c in self.connections: listOfLayerNamePairs.append( [c.fromLayer.name, c.toLayer.name] ) if self.verbosity > 1: print("sharing weights:", self.name, listOfLayerNamePairs) # first, check to see if this will work: count = 0 for (fromLayerName, toLayerName) in listOfLayerNamePairs: for c1 in range(len(self.connections)): if self.connections[c1].fromLayer.name == fromLayerName and \ self.connections[c1].toLayer.name == toLayerName: for c2 in range(len(network.connections)): if network.connections[c2].fromLayer.name == fromLayerName and \ network.connections[c2].toLayer.name == toLayerName: if (self.connections[c1].fromLayer.size != network.connections[c2].fromLayer.size) or \ (self.connections[c1].toLayer.size != network.connections[c2].toLayer.size): raise AttributeError("shareSomeWeights: layer sizes did not match") count += 1 if count != len(listOfLayerNamePairs): raise AttributeError("shareSomeWeights: layer names did not match") # ok, now let's share! self.sharedWeights = 1 network.sharedWeights = 1 for (fromLayerName, toLayerName) in listOfLayerNamePairs: for c1 in range(len(self.connections)): if self.connections[c1].fromLayer.name == fromLayerName and \ self.connections[c1].toLayer.name == toLayerName: for c2 in range(len(network.connections)): if network.connections[c2].fromLayer.name == fromLayerName and \ network.connections[c2].toLayer.name == toLayerName: self.connections[c1].weight = network.connections[c2].weight for (fromLayerName, toLayerName) in listOfLayerNamePairs: for l1 in range(len(self.layers)): if self.layers[l1].name == toLayerName: for l2 in range(len(network.layers)): if network.layers[l2].name == toLayerName: self.layers[l1].weight = network.layers[l2].weight
python
def shareWeights(self, network, listOfLayerNamePairs = None): """ Share weights with another network. Connection is broken after a randomize or change of size. Layers must have the same names and sizes for shared connections in both networks. Example: net.shareWeights(otherNet, [["hidden", "output"]]) This example will take the weights between the hidden and output layers of otherNet and share them with net. Also, the bias values of otherNet["output"] will be shared with net["output"]. If no list is given, will share all weights. """ if listOfLayerNamePairs == None: listOfLayerNamePairs = [] for c in self.connections: listOfLayerNamePairs.append( [c.fromLayer.name, c.toLayer.name] ) if self.verbosity > 1: print("sharing weights:", self.name, listOfLayerNamePairs) # first, check to see if this will work: count = 0 for (fromLayerName, toLayerName) in listOfLayerNamePairs: for c1 in range(len(self.connections)): if self.connections[c1].fromLayer.name == fromLayerName and \ self.connections[c1].toLayer.name == toLayerName: for c2 in range(len(network.connections)): if network.connections[c2].fromLayer.name == fromLayerName and \ network.connections[c2].toLayer.name == toLayerName: if (self.connections[c1].fromLayer.size != network.connections[c2].fromLayer.size) or \ (self.connections[c1].toLayer.size != network.connections[c2].toLayer.size): raise AttributeError("shareSomeWeights: layer sizes did not match") count += 1 if count != len(listOfLayerNamePairs): raise AttributeError("shareSomeWeights: layer names did not match") # ok, now let's share! self.sharedWeights = 1 network.sharedWeights = 1 for (fromLayerName, toLayerName) in listOfLayerNamePairs: for c1 in range(len(self.connections)): if self.connections[c1].fromLayer.name == fromLayerName and \ self.connections[c1].toLayer.name == toLayerName: for c2 in range(len(network.connections)): if network.connections[c2].fromLayer.name == fromLayerName and \ network.connections[c2].toLayer.name == toLayerName: self.connections[c1].weight = network.connections[c2].weight for (fromLayerName, toLayerName) in listOfLayerNamePairs: for l1 in range(len(self.layers)): if self.layers[l1].name == toLayerName: for l2 in range(len(network.layers)): if network.layers[l2].name == toLayerName: self.layers[l1].weight = network.layers[l2].weight
Share weights with another network. Connection is broken after a randomize or change of size. Layers must have the same names and sizes for shared connections in both networks. Example: net.shareWeights(otherNet, [["hidden", "output"]]) This example will take the weights between the hidden and output layers of otherNet and share them with net. Also, the bias values of otherNet["output"] will be shared with net["output"]. If no list is given, will share all weights.
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L3261-L3312
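A hedged sketch of weight sharing; netA and netB are two already-built networks with identical layer names and sizes (their construction is assumed here).

netA.shareWeights(netB, [["hidden", "output"]])
# netA["hidden", "output"].weight is now the same array object as
# netB["hidden", "output"].weight, and the "output" bias vector is shared
# too, so training either network updates both.
netA.shareWeights(netB)       # no list: share every connection
# randomizing or resizing either network silently breaks the sharing.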
Calysto/calysto
calysto/ai/conx.py
BackpropNetwork.propagate
def propagate(self, *arg, **kw):
        """
        Propagates activation through the network.
        """
        output = Network.propagate(self, *arg, **kw)
        if self.interactive:
            self.updateGraphics()
        # IMPORTANT: convert results from numpy.floats to conventional floats
        if type(output) == dict:
            for layerName in output:
                output[layerName] = [float(x) for x in output[layerName]]
            return output
        else:
            return [float(x) for x in output]
python
def propagate(self, *arg, **kw):
        """
        Propagates activation through the network.
        """
        output = Network.propagate(self, *arg, **kw)
        if self.interactive:
            self.updateGraphics()
        # IMPORTANT: convert results from numpy.floats to conventional floats
        if type(output) == dict:
            for layerName in output:
                output[layerName] = [float(x) for x in output[layerName]]
            return output
        else:
            return [float(x) for x in output]
Propagates activation through the network.
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L4286-L4297
Calysto/calysto
calysto/ai/conx.py
BackpropNetwork.loadWeightsFromFile
def loadWeightsFromFile(self, filename, mode='pickle'):
        """
        Deprecated. Use loadWeights instead.
        """
        Network.loadWeights(self, filename, mode)
        self.updateGraphics()
python
def loadWeightsFromFile(self, filename, mode='pickle'):
        """
        Deprecated. Use loadWeights instead.
        """
        Network.loadWeights(self, filename, mode)
        self.updateGraphics()
Deprecated. Use loadWeights instead.
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L4304-L4309
Calysto/calysto
calysto/ai/conx.py
BackpropNetwork.display
def display(self): """Displays the network to the screen.""" size = list(range(len(self.layers))) size.reverse() for i in size: layer = self.layers[i] if layer.active: print('%s layer (size %d)' % (layer.name, layer.size)) tlabel, olabel = '', '' if (layer.type == 'Output'): if self.countWrong: tlabel = ' (%s)' % self.classify(layer.target.tolist()) olabel = ' (%s)' % self.classify(layer.activation.tolist()) if olabel == tlabel: self.numRight += 1 else: olabel += ' *** WRONG ***' self.numWrong += 1 if self.actDisplay is not None: self.actDisplay.showWrong() print('Target : %s%s' % (pretty(layer.target, max=15), tlabel)) print('Activation: %s%s' % (pretty(layer.activation, max=15), olabel)) if self.patterned and layer.type != 'Hidden': targetWord, diff = self.getWord( layer.target, returnDiff = 1) if layer.kind == 'Output': if targetWord == None: print("Target Pattern = %s" % "No match") else: if diff == 0.0: print("Target Pattern = '%s'" % targetWord) else: print("Target Pattern = '%s'; difference = %f)" % (targetWord, diff)) actWord, diff = self.getWord( layer.activation, returnDiff = 1 ) if (layer.kind == 'Input' or layer.kind == 'Output'): if actWord == None: print("Matching Pattern = %s" % "No match") else: if diff == 0.0: print("Matching Pattern = '%s'" % actWord) else: print("Matching Pattern = '%s'; difference = %f" % (actWord, diff)) print("------------------------------------")
python
def display(self): """Displays the network to the screen.""" size = list(range(len(self.layers))) size.reverse() for i in size: layer = self.layers[i] if layer.active: print('%s layer (size %d)' % (layer.name, layer.size)) tlabel, olabel = '', '' if (layer.type == 'Output'): if self.countWrong: tlabel = ' (%s)' % self.classify(layer.target.tolist()) olabel = ' (%s)' % self.classify(layer.activation.tolist()) if olabel == tlabel: self.numRight += 1 else: olabel += ' *** WRONG ***' self.numWrong += 1 if self.actDisplay is not None: self.actDisplay.showWrong() print('Target : %s%s' % (pretty(layer.target, max=15), tlabel)) print('Activation: %s%s' % (pretty(layer.activation, max=15), olabel)) if self.patterned and layer.type != 'Hidden': targetWord, diff = self.getWord( layer.target, returnDiff = 1) if layer.kind == 'Output': if targetWord == None: print("Target Pattern = %s" % "No match") else: if diff == 0.0: print("Target Pattern = '%s'" % targetWord) else: print("Target Pattern = '%s'; difference = %f)" % (targetWord, diff)) actWord, diff = self.getWord( layer.activation, returnDiff = 1 ) if (layer.kind == 'Input' or layer.kind == 'Output'): if actWord == None: print("Matching Pattern = %s" % "No match") else: if diff == 0.0: print("Matching Pattern = '%s'" % actWord) else: print("Matching Pattern = '%s'; difference = %f" % (actWord, diff)) print("------------------------------------")
Displays the network to the screen.
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L4443-L4484
Calysto/calysto
calysto/ai/conx.py
SRN.setSequenceType
def setSequenceType(self, value):
        """
        You must set this!
        """
        if value == "ordered-continuous":
            self.orderedInputs = 1
            self.initContext = 0
        elif value == "random-segmented":
            self.orderedInputs = 0
            self.initContext = 1
        elif value == "random-continuous":
            self.orderedInputs = 0
            self.initContext = 0
        elif value == "ordered-segmented":
            self.orderedInputs = 1
            self.initContext = 1
        else:
            raise AttributeError("invalid sequence type: '%s'" % value)
        self.sequenceType = value
python
def setSequenceType(self, value):
        """
        You must set this!
        """
        if value == "ordered-continuous":
            self.orderedInputs = 1
            self.initContext = 0
        elif value == "random-segmented":
            self.orderedInputs = 0
            self.initContext = 1
        elif value == "random-continuous":
            self.orderedInputs = 0
            self.initContext = 0
        elif value == "ordered-segmented":
            self.orderedInputs = 1
            self.initContext = 1
        else:
            raise AttributeError("invalid sequence type: '%s'" % value)
        self.sequenceType = value
You must set this!
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L4584-L4602
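The four accepted values and the flags they set, as a hedged sketch assuming an SRN instance `srn`:

srn.setSequenceType("ordered-continuous")   # orderedInputs=1, initContext=0
# "ordered-segmented"  -> orderedInputs=1, initContext=1
# "random-continuous"  -> orderedInputs=0, initContext=0
# "random-segmented"   -> orderedInputs=0, initContext=1
# any other string raises AttributeError("invalid sequence type: ...")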
Calysto/calysto
calysto/ai/conx.py
SRN.addLayers
def addLayers(self, *arg, **kw): """ Creates an N layer network with 'input', 'hidden1', 'hidden2',... and 'output' layers. Keyword type indicates "parallel" or "serial". If only one hidden layer, it is called "hidden". """ netType = "serial" if "type" in kw: netType = kw["type"] self.addLayer('input', arg[0]) hiddens = [] contexts = [] if len(arg) > 3: # FIXME: add context for each hcount = 0 for hidc in arg[1:-1]: name = 'hidden%d' % hcount cname = 'context%d' % hcount self.addLayer(name, hidc) hiddens.append(name) self.addContextLayer(cname, hidc, name) hiddens.append((cname, name)) hcount += 1 elif len(arg) == 3: name = 'hidden' self.addContextLayer('context', arg[1], name) self.addLayer(name, arg[1]) hiddens.append(name) contexts.append(('context', name)) elif len(arg) == 2: pass else: raise AttributeError("not enough layers! need >= 2") self.addLayer('output', arg[-1]) # Connect contexts for (fromName, toName) in contexts: self.connect(fromName, toName) lastName = "input" for name in hiddens: if netType == "parallel": self.connect('input', name) self.connect(name, 'output') else: # serial self.connect(lastName, name) lastName = name if netType == "serial" or lastName == "input": self.connect(lastName, "output")
python
def addLayers(self, *arg, **kw): """ Creates an N layer network with 'input', 'hidden1', 'hidden2',... and 'output' layers. Keyword type indicates "parallel" or "serial". If only one hidden layer, it is called "hidden". """ netType = "serial" if "type" in kw: netType = kw["type"] self.addLayer('input', arg[0]) hiddens = [] contexts = [] if len(arg) > 3: # FIXME: add context for each hcount = 0 for hidc in arg[1:-1]: name = 'hidden%d' % hcount cname = 'context%d' % hcount self.addLayer(name, hidc) hiddens.append(name) self.addContextLayer(cname, hidc, name) hiddens.append((cname, name)) hcount += 1 elif len(arg) == 3: name = 'hidden' self.addContextLayer('context', arg[1], name) self.addLayer(name, arg[1]) hiddens.append(name) contexts.append(('context', name)) elif len(arg) == 2: pass else: raise AttributeError("not enough layers! need >= 2") self.addLayer('output', arg[-1]) # Connect contexts for (fromName, toName) in contexts: self.connect(fromName, toName) lastName = "input" for name in hiddens: if netType == "parallel": self.connect('input', name) self.connect(name, 'output') else: # serial self.connect(lastName, name) lastName = name if netType == "serial" or lastName == "input": self.connect(lastName, "output")
Creates an N layer network with 'input', 'hidden1', 'hidden2',... and 'output' layers. Keyword type indicates "parallel" or "serial". If only one hidden layer, it is called "hidden".
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L4621-L4666
Calysto/calysto
calysto/ai/conx.py
SRN.addThreeLayers
def addThreeLayers(self, inc, hidc, outc):
        """
        Creates a three level network with a context layer.
        """
        self.addLayer('input', inc)
        self.addContextLayer('context', hidc, 'hidden')
        self.addLayer('hidden', hidc)
        self.addLayer('output', outc)
        self.connect('input', 'hidden')
        self.connect('context', 'hidden')
        self.connect('hidden', 'output')
python
def addThreeLayers(self, inc, hidc, outc):
        """
        Creates a three level network with a context layer.
        """
        self.addLayer('input', inc)
        self.addContextLayer('context', hidc, 'hidden')
        self.addLayer('hidden', hidc)
        self.addLayer('output', outc)
        self.connect('input', 'hidden')
        self.connect('context', 'hidden')
        self.connect('hidden', 'output')
Creates a three level network with a context layer.
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L4667-L4677
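A hedged sketch of the Elman-style topology this builds; the bare SRN() constructor call is an assumption.

srn = SRN()
srn.addThreeLayers(3, 5, 3)
# layers:      input(3), context(5), hidden(5), output(3)
# connections: input->hidden, context->hidden, hidden->output
# copyHiddenToContext() later copies hidden activations into "context".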
Calysto/calysto
calysto/ai/conx.py
SRN.addSRNLayers
def addSRNLayers(self, inc, hidc, outc):
        """
        Wraps SRN.addThreeLayers() for compatibility.
        """
        self.addThreeLayers(inc, hidc, outc)
python
def addSRNLayers(self, inc, hidc, outc):
        """
        Wraps SRN.addThreeLayers() for compatibility.
        """
        self.addThreeLayers(inc, hidc, outc)
Wraps SRN.addThreeLayers() for compatibility.
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L4678-L4682
Calysto/calysto
calysto/ai/conx.py
SRN.addContext
def addContext(self, layer, hiddenLayerName = 'hidden', verbosity = 0):
        """
        Adds a context layer. Necessary to keep self.contextLayers
        dictionary up to date.
        """
        # better not add context layer first if using sweep() without mapInput
        SRN.add(self, layer, verbosity)
        if hiddenLayerName in self.contextLayers:
            raise KeyError('There is already a context layer associated with this hidden layer.', \
                           hiddenLayerName)
        else:
            self.contextLayers[hiddenLayerName] = layer
            layer.kind = 'Context'
python
def addContext(self, layer, hiddenLayerName = 'hidden', verbosity = 0):
        """
        Adds a context layer. Necessary to keep self.contextLayers
        dictionary up to date.
        """
        # better not add context layer first if using sweep() without mapInput
        SRN.add(self, layer, verbosity)
        if hiddenLayerName in self.contextLayers:
            raise KeyError('There is already a context layer associated with this hidden layer.', \
                           hiddenLayerName)
        else:
            self.contextLayers[hiddenLayerName] = layer
            layer.kind = 'Context'
Adds a context layer. Necessary to keep self.contextLayers dictionary up to date.
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L4686-L4697
Calysto/calysto
calysto/ai/conx.py
SRN.copyHiddenToContext
def copyHiddenToContext(self):
        """
        Uses key to identify the hidden layer associated with each
        layer in the self.contextLayers dictionary.
        """
        for item in list(self.contextLayers.items()):
            if self.verbosity > 2: print('Hidden layer: ', self.getLayer(item[0]).activation)
            if self.verbosity > 2: print('Context layer before copy: ', item[1].activation)
            item[1].copyActivations(self.getLayer(item[0]).activation)
            if self.verbosity > 2: print('Context layer after copy: ', item[1].activation)
python
def copyHiddenToContext(self):
        """
        Uses key to identify the hidden layer associated with each
        layer in the self.contextLayers dictionary.
        """
        for item in list(self.contextLayers.items()):
            if self.verbosity > 2: print('Hidden layer: ', self.getLayer(item[0]).activation)
            if self.verbosity > 2: print('Context layer before copy: ', item[1].activation)
            item[1].copyActivations(self.getLayer(item[0]).activation)
            if self.verbosity > 2: print('Context layer after copy: ', item[1].activation)
Uses key to identify the hidden layer associated with each layer in the self.contextLayers dictionary.
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L4700-L4709
Calysto/calysto
calysto/ai/conx.py
SRN.setContext
def setContext(self, value = .5):
        """
        Clears the context layer by setting context layer to (default) value 0.5.
        """
        for context in list(self.contextLayers.values()):
            context.resetFlags() # hidden activations have already been copied in
            context.setActivations(value)
python
def setContext(self, value = .5):
        """
        Clears the context layer by setting context layer to (default) value 0.5.
        """
        for context in list(self.contextLayers.values()):
            context.resetFlags() # hidden activations have already been copied in
            context.setActivations(value)
Clears the context layer by setting context layer to (default) value 0.5.
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L4710-L4716
Calysto/calysto
calysto/ai/conx.py
SRN.step
def step(self, **args): """ SRN.step() Extends network step method by automatically copying hidden layer activations to the context layer. """ if self.sequenceType == None: raise AttributeError("""sequenceType not set! Use SRN.setSequenceType() """) # take care of any params other than layer names: # two ways to clear context: # 1. force it to right now with arg initContext = 1: if 'initContext' in args: if args['initContext']: self.setContext() del args['initContext'] # 2. have initContext be true elif self.initContext: self.setContext() # if initContext is off, then we assume user knows that, # so we reset the flags on all context layers: if self.initContext == 0: for context in list(self.contextLayers.values()): context.activationSet = 1 # replace all patterns for key in args: args[key] = self.replacePatterns( args[key], key ) # Get all of the input/output layer names: inputBankNames = [layer.name for layer in self.layers if layer.kind == 'Input'] outputBankNames = [layer.name for layer in self.layers if layer.kind == 'Output'] inputBankSizes = [layer.size for layer in self.layers if layer.kind == 'Input'] inputBankTotalSize = sum(inputBankSizes) inputArgSizes = [len(args[name]) for name in inputBankNames if name in args] inputArgTotalSize = sum(inputArgSizes) sequenceLength = inputArgTotalSize // inputBankTotalSize learning = self.learning totalRetvals = (0.0, 0, 0) # error, correct, total totalPCorrect = {} for step in range(sequenceLength): if self.verbosity >= 1 or self.interactive: print("-----------------------------------Step #", step + 1) dict = {} dict.update(args) # in case context, or others # now, overwrite input and output, if necessary for name in inputBankNames: if name in args: patternLength = self[name].size offset = step * patternLength if (offset + patternLength) >= len(args[name]): # if this seq is too big, use last part: dict[name] = args[name][-patternLength:] else: # else, go to the right spot in seq: dict[name] = args[name][offset:offset+patternLength] for name in outputBankNames: if name in args: patternLength = self[name].size offset = step * patternLength if (offset + patternLength) >= len(args[name]): # if this seq is too big, use last part: dict[name] = args[name][-patternLength:] else: # else, go to the right spot in seq: dict[name] = args[name][offset:offset+patternLength] # get info for predicition ------------------------- for p in self.prediction: (inName, outName) = p inLayer = self.getLayer(inName) if not inLayer.type == 'Input': raise LayerError('Prediction input layer not type \'Input\'.', inLayer.type) outLayer = self.getLayer(outName) if not outLayer.type == 'Output': raise LayerError('Prediction output layer not type \'Output\'.', outLayer.type) if step == sequenceLength - 1: # last one in sequence; what do we do? start = 0 # wrap to next input vector if not self._sweeping: # not in sweep, in step, no target raise LayerError("Attempting to predict last item in sequence, but using step(). 
Use sweep() instead.") else: # in a sweep, so get the next pattern if one: if self.currentSweepCount == None: # last item in epoch, predict back to first pattern # Train it to predict first pattern, first sequence item pattern = self.getData(self.loadOrder[0]) for key in pattern: pattern[key] = self.replacePatterns( pattern[key], key ) if inName in inputBankNames: if inName in pattern: dict[outName] = pattern[inName][start:start+patternLength] #dict[outName] = pattern["input"][start:start+patternLength] else: pattern = self.getData(self.loadOrder[self.currentSweepCount+1]) for key in pattern: pattern[key] = self.replacePatterns( pattern[key], key ) if inName in inputBankNames: if inName in pattern: dict[outName] = pattern[inName][start:start+patternLength] #dict[outName] = pattern["input"][start:start+patternLength] else: # in middle of sequence start = (step + 1) * inLayer.size dict[outName] = args[inName][start:start+patternLength] # end predicition code ----------------------------- if step < sequenceLength - 1: # not the last one if not self.learnDuringSequence: self.learning = 0 retvals = self.networkStep(**dict) self.learning = learning # in case we turned it off totalRetvals = list(map(lambda x,y: x+y, totalRetvals[:3], retvals[:3])) sumMerge(totalPCorrect, retvals[3]) totalRetvals.append( totalPCorrect) return totalRetvals
python
def step(self, **args): """ SRN.step() Extends network step method by automatically copying hidden layer activations to the context layer. """ if self.sequenceType == None: raise AttributeError("""sequenceType not set! Use SRN.setSequenceType() """) # take care of any params other than layer names: # two ways to clear context: # 1. force it to right now with arg initContext = 1: if 'initContext' in args: if args['initContext']: self.setContext() del args['initContext'] # 2. have initContext be true elif self.initContext: self.setContext() # if initContext is off, then we assume user knows that, # so we reset the flags on all context layers: if self.initContext == 0: for context in list(self.contextLayers.values()): context.activationSet = 1 # replace all patterns for key in args: args[key] = self.replacePatterns( args[key], key ) # Get all of the input/output layer names: inputBankNames = [layer.name for layer in self.layers if layer.kind == 'Input'] outputBankNames = [layer.name for layer in self.layers if layer.kind == 'Output'] inputBankSizes = [layer.size for layer in self.layers if layer.kind == 'Input'] inputBankTotalSize = sum(inputBankSizes) inputArgSizes = [len(args[name]) for name in inputBankNames if name in args] inputArgTotalSize = sum(inputArgSizes) sequenceLength = inputArgTotalSize // inputBankTotalSize learning = self.learning totalRetvals = (0.0, 0, 0) # error, correct, total totalPCorrect = {} for step in range(sequenceLength): if self.verbosity >= 1 or self.interactive: print("-----------------------------------Step #", step + 1) dict = {} dict.update(args) # in case context, or others # now, overwrite input and output, if necessary for name in inputBankNames: if name in args: patternLength = self[name].size offset = step * patternLength if (offset + patternLength) >= len(args[name]): # if this seq is too big, use last part: dict[name] = args[name][-patternLength:] else: # else, go to the right spot in seq: dict[name] = args[name][offset:offset+patternLength] for name in outputBankNames: if name in args: patternLength = self[name].size offset = step * patternLength if (offset + patternLength) >= len(args[name]): # if this seq is too big, use last part: dict[name] = args[name][-patternLength:] else: # else, go to the right spot in seq: dict[name] = args[name][offset:offset+patternLength] # get info for predicition ------------------------- for p in self.prediction: (inName, outName) = p inLayer = self.getLayer(inName) if not inLayer.type == 'Input': raise LayerError('Prediction input layer not type \'Input\'.', inLayer.type) outLayer = self.getLayer(outName) if not outLayer.type == 'Output': raise LayerError('Prediction output layer not type \'Output\'.', outLayer.type) if step == sequenceLength - 1: # last one in sequence; what do we do? start = 0 # wrap to next input vector if not self._sweeping: # not in sweep, in step, no target raise LayerError("Attempting to predict last item in sequence, but using step(). 
Use sweep() instead.") else: # in a sweep, so get the next pattern if one: if self.currentSweepCount == None: # last item in epoch, predict back to first pattern # Train it to predict first pattern, first sequence item pattern = self.getData(self.loadOrder[0]) for key in pattern: pattern[key] = self.replacePatterns( pattern[key], key ) if inName in inputBankNames: if inName in pattern: dict[outName] = pattern[inName][start:start+patternLength] #dict[outName] = pattern["input"][start:start+patternLength] else: pattern = self.getData(self.loadOrder[self.currentSweepCount+1]) for key in pattern: pattern[key] = self.replacePatterns( pattern[key], key ) if inName in inputBankNames: if inName in pattern: dict[outName] = pattern[inName][start:start+patternLength] #dict[outName] = pattern["input"][start:start+patternLength] else: # in middle of sequence start = (step + 1) * inLayer.size dict[outName] = args[inName][start:start+patternLength] # end predicition code ----------------------------- if step < sequenceLength - 1: # not the last one if not self.learnDuringSequence: self.learning = 0 retvals = self.networkStep(**dict) self.learning = learning # in case we turned it off totalRetvals = list(map(lambda x,y: x+y, totalRetvals[:3], retvals[:3])) sumMerge(totalPCorrect, retvals[3]) totalRetvals.append( totalPCorrect) return totalRetvals
SRN.step() Extends network step method by automatically copying hidden layer activations to the context layer.
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L4722-L4828
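A very rough usage sketch of stepping through one sequence; the bank sizes, the flattened keyword vectors, and the four-value return unpacking are assumptions drawn from the code above, not a tested call.

srn.setSequenceType("ordered-continuous")   # required before step()
# "input" is 2 units and "output" is 2 units, so 6 values = 3 sequence steps:
err, correct, total, pcorrect = srn.step(input=[0, 1, 1, 0, 1, 1],
                                         output=[1, 0, 1, 1, 0, 0])
# unless learnDuringSequence is set, only the final step adjusts weights.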
Calysto/calysto
calysto/ai/conx.py
SRN.showPerformance
def showPerformance(self):
        """
        SRN.showPerformance()
        Clears the context layer(s) and then repeatedly cycles through
        training patterns until the user decides to quit.
        """
        if len(self.inputs) == 0:
            print('no patterns to test')
            return
        self.setContext()
        while True:
            BackpropNetwork.showPerformance(self)
            if self.quitFromSweep:
                return
python
def showPerformance(self):
        """
        SRN.showPerformance()
        Clears the context layer(s) and then repeatedly cycles through
        training patterns until the user decides to quit.
        """
        if len(self.inputs) == 0:
            print('no patterns to test')
            return
        self.setContext()
        while True:
            BackpropNetwork.showPerformance(self)
            if self.quitFromSweep:
                return
SRN.showPerformance() Clears the context layer(s) and then repeatedly cycles through training patterns until the user decides to quit.
https://github.com/Calysto/calysto/blob/20813c0f48096317aa775d03a5c6b20f12fafc93/calysto/ai/conx.py#L4851-L4864
marshallward/f90nml
f90nml/tokenizer.py
Tokenizer.parse
def parse(self, line): """Tokenize a line of Fortran source.""" tokens = [] self.idx = -1 # Bogus value to ensure idx = 0 after first iteration self.characters = iter(line) self.update_chars() while self.char != '\n': # Update namelist group status if self.char in ('&', '$'): self.group_token = self.char if self.group_token and ( (self.group_token, self.char) in (('&', '/'), ('$', '$'))): self.group_token = False word = '' if self.char in self.whitespace: while self.char in self.whitespace: word += self.char self.update_chars() elif self.char in ('!', '#') or self.group_token is None: # Abort the iteration and build the comment token word = line[self.idx:-1] self.char = '\n' elif self.char in '"\'' or self.prior_delim: word = self.parse_string() elif self.char.isalpha(): word = self.parse_name(line) elif self.char in ('+', '-'): # Lookahead to check for IEEE value self.characters, lookahead = itertools.tee(self.characters) ieee_val = ''.join(itertools.takewhile(str.isalpha, lookahead)) if ieee_val.lower() in ('inf', 'infinity', 'nan'): word = self.char + ieee_val self.characters = lookahead self.prior_char = ieee_val[-1] self.char = next(lookahead, '\n') else: word = self.parse_numeric() elif self.char.isdigit(): word = self.parse_numeric() elif self.char == '.': self.update_chars() if self.char.isdigit(): frac = self.parse_numeric() word = '.' + frac else: word = '.' while self.char.isalpha(): word += self.char self.update_chars() if self.char == '.': word += self.char self.update_chars() elif self.char in Tokenizer.punctuation: word = self.char self.update_chars() else: # This should never happen raise ValueError tokens.append(word) return tokens
python
def parse(self, line): """Tokenize a line of Fortran source.""" tokens = [] self.idx = -1 # Bogus value to ensure idx = 0 after first iteration self.characters = iter(line) self.update_chars() while self.char != '\n': # Update namelist group status if self.char in ('&', '$'): self.group_token = self.char if self.group_token and ( (self.group_token, self.char) in (('&', '/'), ('$', '$'))): self.group_token = False word = '' if self.char in self.whitespace: while self.char in self.whitespace: word += self.char self.update_chars() elif self.char in ('!', '#') or self.group_token is None: # Abort the iteration and build the comment token word = line[self.idx:-1] self.char = '\n' elif self.char in '"\'' or self.prior_delim: word = self.parse_string() elif self.char.isalpha(): word = self.parse_name(line) elif self.char in ('+', '-'): # Lookahead to check for IEEE value self.characters, lookahead = itertools.tee(self.characters) ieee_val = ''.join(itertools.takewhile(str.isalpha, lookahead)) if ieee_val.lower() in ('inf', 'infinity', 'nan'): word = self.char + ieee_val self.characters = lookahead self.prior_char = ieee_val[-1] self.char = next(lookahead, '\n') else: word = self.parse_numeric() elif self.char.isdigit(): word = self.parse_numeric() elif self.char == '.': self.update_chars() if self.char.isdigit(): frac = self.parse_numeric() word = '.' + frac else: word = '.' while self.char.isalpha(): word += self.char self.update_chars() if self.char == '.': word += self.char self.update_chars() elif self.char in Tokenizer.punctuation: word = self.char self.update_chars() else: # This should never happen raise ValueError tokens.append(word) return tokens
Tokenize a line of Fortran source.
https://github.com/marshallward/f90nml/blob/4932cabc5221afc844ee6a5b4a05ceb8bd4a2711/f90nml/tokenizer.py#L32-L105
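A hedged sketch of tokenizing one namelist line; it assumes Tokenizer can be imported from f90nml.tokenizer and built with no arguments, and parse() wants the trailing newline.

from f90nml.tokenizer import Tokenizer

tok = Tokenizer()
tok.parse("&config  n = 3, x = -1.5e0  ! note\n")
# expected to yield roughly (whitespace runs are kept as tokens):
# ['&', 'config', '  ', 'n', ' ', '=', ' ', '3', ',', ' ',
#  'x', ' ', '=', ' ', '-1.5e0', '  ', '! note']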
marshallward/f90nml
f90nml/tokenizer.py
Tokenizer.parse_name
def parse_name(self, line):
        """Tokenize a Fortran name, such as a variable or subroutine."""
        end = self.idx
        for char in line[self.idx:]:
            if not char.isalnum() and char not in '\'"_':
                break
            end += 1

        word = line[self.idx:end]

        self.idx = end - 1
        # Update iterator, minus first character which was already read
        self.characters = itertools.islice(self.characters, len(word) - 1, None)
        self.update_chars()

        return word
python
def parse_name(self, line):
        """Tokenize a Fortran name, such as a variable or subroutine."""
        end = self.idx
        for char in line[self.idx:]:
            if not char.isalnum() and char not in '\'"_':
                break
            end += 1

        word = line[self.idx:end]

        self.idx = end - 1
        # Update iterator, minus first character which was already read
        self.characters = itertools.islice(self.characters, len(word) - 1, None)
        self.update_chars()

        return word
Tokenize a Fortran name, such as a variable or subroutine.
https://github.com/marshallward/f90nml/blob/4932cabc5221afc844ee6a5b4a05ceb8bd4a2711/f90nml/tokenizer.py#L107-L123
marshallward/f90nml
f90nml/tokenizer.py
Tokenizer.parse_string
def parse_string(self): """Tokenize a Fortran string.""" word = '' if self.prior_delim: delim = self.prior_delim self.prior_delim = None else: delim = self.char word += self.char self.update_chars() while True: if self.char == delim: # Check for escaped delimiters self.update_chars() if self.char == delim: word += 2 * delim self.update_chars() else: word += delim break elif self.char == '\n': self.prior_delim = delim break else: word += self.char self.update_chars() return word
python
def parse_string(self): """Tokenize a Fortran string.""" word = '' if self.prior_delim: delim = self.prior_delim self.prior_delim = None else: delim = self.char word += self.char self.update_chars() while True: if self.char == delim: # Check for escaped delimiters self.update_chars() if self.char == delim: word += 2 * delim self.update_chars() else: word += delim break elif self.char == '\n': self.prior_delim = delim break else: word += self.char self.update_chars() return word
Tokenize a Fortran string.
https://github.com/marshallward/f90nml/blob/4932cabc5221afc844ee6a5b4a05ceb8bd4a2711/f90nml/tokenizer.py#L125-L154
marshallward/f90nml
f90nml/tokenizer.py
Tokenizer.parse_numeric
def parse_numeric(self): """Tokenize a Fortran numerical value.""" word = '' frac = False if self.char == '-': word += self.char self.update_chars() while self.char.isdigit() or (self.char == '.' and not frac): # Only allow one decimal point if self.char == '.': frac = True word += self.char self.update_chars() # Check for float exponent if self.char in 'eEdD': word += self.char self.update_chars() if self.char in '+-': word += self.char self.update_chars() while self.char.isdigit(): word += self.char self.update_chars() return word
python
def parse_numeric(self): """Tokenize a Fortran numerical value.""" word = '' frac = False if self.char == '-': word += self.char self.update_chars() while self.char.isdigit() or (self.char == '.' and not frac): # Only allow one decimal point if self.char == '.': frac = True word += self.char self.update_chars() # Check for float exponent if self.char in 'eEdD': word += self.char self.update_chars() if self.char in '+-': word += self.char self.update_chars() while self.char.isdigit(): word += self.char self.update_chars() return word
Tokenize a Fortran numerical value.
https://github.com/marshallward/f90nml/blob/4932cabc5221afc844ee6a5b4a05ceb8bd4a2711/f90nml/tokenizer.py#L156-L184
marshallward/f90nml
f90nml/tokenizer.py
Tokenizer.update_chars
def update_chars(self):
        """Update the current characters in the tokenizer."""
        # NOTE: We spoof non-Unix files by returning '\n' on StopIteration
        self.prior_char, self.char = self.char, next(self.characters, '\n')
        self.idx += 1
python
def update_chars(self):
        """Update the current characters in the tokenizer."""
        # NOTE: We spoof non-Unix files by returning '\n' on StopIteration
        self.prior_char, self.char = self.char, next(self.characters, '\n')
        self.idx += 1
Update the current characters in the tokenizer.
https://github.com/marshallward/f90nml/blob/4932cabc5221afc844ee6a5b4a05ceb8bd4a2711/f90nml/tokenizer.py#L186-L190