package network

import (
    "errors"
    "fmt"
    "github.com/yaricom/goNEAT/neat/utils"
    "math"
)

// FastNetworkLink is the connection descriptor for the fast network
type FastNetworkLink struct {
    // The index of source neuron
    SourceIndex int
    // The index of target neuron
    TargetIndex int
    // The weight of this link
    Weight float64
    // The signal relayed by this link
    Signal float64
}

// FastControlNode is the module relay (control node) descriptor for the fast network
type FastControlNode struct {
    // The activation function for control node
    ActivationType utils.NodeActivationType
    // The indexes of the input nodes
    InputIndexes []int
    // The indexes of the output nodes
    OutputIndexes []int
}

// FastModularNetworkSolver is the fast modular network solver implementation to be used for simulation of large neural networks.
type FastModularNetworkSolver struct {
    // A network id
    Id int
    // The name of this network
    Name string

    // The current activation values per each neuron
    neuronSignals []float64
    // This array is a parallel of neuronSignals and is used to test network relaxation
    neuronSignalsBeingProcessed []float64

    // The activation functions per neuron, must be in the same order as neuronSignals. Has nil entries for
    // neurons that are inputs or outputs of a module.
    activationFunctions []utils.NodeActivationType
    // The bias values associated with neurons
    biasList []float64
    // The control nodes relaying between network modules
    modules []*FastControlNode
    // The connections
    connections []*FastNetworkLink

    // The number of input neurons
    inputNeuronCount int
    // The total number of sensors in the network (input + bias). This is also the index of the first output neuron in the neuron signals.
    sensorNeuronCount int
    // The number of output neurons
    outputNeuronCount int
    // The bias neuron count (usually one). This is also the index of the first input neuron in the neuron signals.
    biasNeuronCount int
    // The total number of neurons in network
    totalNeuronCount int

    // For recursive activation, marks whether we have finished this node yet
    activated []bool
    // For recursive activation, marks whether a node is currently being calculated (recurrent connections processing)
    inActivation []bool
    // For recursive activation, the previous activation values of recurrent connections (recurrent connections processing)
    lastActivation []float64

    // The adjacency list to hold IDs of outgoing nodes for each network node
    adjacentList [][]int
    // The adjacency list to hold IDs of incoming nodes for each network node
    reverseAdjacentList [][]int
    // The adjacency matrix to hold connection weights between all connected nodes
    adjacentMatrix [][]float64
}

// NewFastModularNetworkSolver creates a new fast modular network solver
func NewFastModularNetworkSolver(biasNeuronCount, inputNeuronCount, outputNeuronCount, totalNeuronCount int,
    activationFunctions []utils.NodeActivationType, connections []*FastNetworkLink,
    biasList []float64, modules []*FastControlNode) *FastModularNetworkSolver {

    fmm := FastModularNetworkSolver{
        biasNeuronCount:     biasNeuronCount,
        inputNeuronCount:    inputNeuronCount,
        sensorNeuronCount:   biasNeuronCount + inputNeuronCount,
        outputNeuronCount:   outputNeuronCount,
        totalNeuronCount:    totalNeuronCount,
        activationFunctions: activationFunctions,
        biasList:            biasList,
        modules:             modules,
        connections:         connections,
    }

    // Allocate the arrays that store the states at different points in the neural network.
    // The neuron signals are initialised to 0 by default. Only bias nodes need setting to 1.
    fmm.neuronSignals = make([]float64, totalNeuronCount)
    fmm.neuronSignalsBeingProcessed = make([]float64, totalNeuronCount)
    for i := 0; i < biasNeuronCount; i++ {
        fmm.neuronSignals[i] = 1.0 // BIAS neuron signal
    }

    // Allocate activation arrays
    fmm.activated = make([]bool, totalNeuronCount)
    fmm.inActivation = make([]bool, totalNeuronCount)
    fmm.lastActivation = make([]float64, totalNeuronCount)

    // Build adjacency lists and matrix for fast access to incoming/outgoing nodes and connection weights
    fmm.adjacentList = make([][]int, totalNeuronCount)
    fmm.reverseAdjacentList = make([][]int, totalNeuronCount)
    fmm.adjacentMatrix = make([][]float64, totalNeuronCount)

    for i := 0; i < totalNeuronCount; i++ {
        fmm.adjacentList[i] = make([]int, 0)
        fmm.reverseAdjacentList[i] = make([]int, 0)
        fmm.adjacentMatrix[i] = make([]float64, totalNeuronCount)
    }

    for i := 0; i < len(connections); i++ {
        crs := connections[i].SourceIndex
        crt := connections[i].TargetIndex
        // Holds outgoing nodes
        fmm.adjacentList[crs] = append(fmm.adjacentList[crs], crt)
        // Holds incoming nodes
        fmm.reverseAdjacentList[crt] = append(fmm.reverseAdjacentList[crt], crs)
        // Holds link weight
        fmm.adjacentMatrix[crs][crt] = connections[i].Weight
    }

    return &fmm
}
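
// Construction sketch (assumption, not part of the original API surface): builds a minimal
// hand-wired solver to illustrate how the parallel slices line up. Neurons are indexed with
// bias neurons first, then inputs, then outputs, then hidden neurons, so a network with one
// bias, two inputs, one output and no hidden neurons has four neurons in total. The activation
// constant below is assumed to be one of the types defined in neat/utils; substitute any
// supported activation type.
func exampleBuildMinimalSolver() *FastModularNetworkSolver {
    connections := []*FastNetworkLink{
        {SourceIndex: 1, TargetIndex: 3, Weight: 0.5},  // input #1 -> output
        {SourceIndex: 2, TargetIndex: 3, Weight: -0.5}, // input #2 -> output
    }
    // One activation function per neuron, in the same order as the neuron signals
    activations := make([]utils.NodeActivationType, 4)
    for i := range activations {
        activations[i] = utils.SigmoidSteepenedActivation
    }
    // Per-neuron bias values added during forward stepping; all zero in this sketch
    biasList := make([]float64, 4)
    return NewFastModularNetworkSolver(
        1, // biasNeuronCount
        2, // inputNeuronCount
        1, // outputNeuronCount
        4, // totalNeuronCount
        activations, connections, biasList, nil) // no modules
}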

// ForwardSteps propagates the activation wave through all network nodes for the provided number of steps
// in the forward direction. Returns true if the activation wave passed from all inputs to the outputs.
func (fmm *FastModularNetworkSolver) ForwardSteps(steps int) (res bool, err error) {
    for i := 0; i < steps; i++ {
        if res, err = fmm.forwardStep(0); err != nil {
            return false, err
        }
    }
    return res, nil
}
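
// Usage sketch (assumption, illustrative only): the basic activation cycle with explicit forward
// stepping, suitable when the network depth is known. It relies on the hypothetical
// exampleBuildMinimalSolver helper sketched above; for that single-layer network one forward step
// is enough to carry the signals from the sensors to the output.
func exampleForwardActivation(inputs []float64) ([]float64, error) {
    solver := exampleBuildMinimalSolver()
    if err := solver.LoadSensors(inputs); err != nil {
        return nil, err
    }
    // One step per network layer; a deeper network would need more steps
    if _, err := solver.ForwardSteps(1); err != nil {
        return nil, err
    }
    return solver.ReadOutputs(), nil
}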

// RecursiveSteps propagates the activation wave through all network nodes by recursion starting from the output nodes.
// Returns true if the activation wave passed from all inputs to the outputs. This is the preferred method
// of network activation when the number of forward steps can not be easily calculated and no network modules are set.
func (fmm *FastModularNetworkSolver) RecursiveSteps() (res bool, err error) {
    if len(fmm.modules) > 0 {
        return false, errors.New("recursive activation can not be used for network with defined modules")
    }

    // Initialize boolean arrays and set the last activation signal for output/hidden neurons
    for i := 0; i < fmm.totalNeuronCount; i++ {
        // Set as activated if i is an input node, otherwise ensure it is unactivated (false)
        fmm.activated[i] = i < fmm.sensorNeuronCount

        fmm.inActivation[i] = false
        // set last activation for output/hidden neurons
        if i >= fmm.sensorNeuronCount {
            fmm.lastActivation[i] = fmm.neuronSignals[i]
        }
    }

    // Get each output node activation recursively
    for i := 0; i < fmm.outputNeuronCount; i++ {
        index := fmm.sensorNeuronCount + i
        if res, err = fmm.recursiveActivateNode(index); err != nil {
            return false, err
        } else if !res {
            return false, fmt.Errorf("failed to recursively activate the output neuron at %d", index)
        }
    }

    return res, nil
}

// recursiveActivateNode propagates the activation wave by recursively traversing the graph of incoming signals for a given output neuron
func (fmm *FastModularNetworkSolver) recursiveActivateNode(currentNode int) (res bool, err error) {
    // If the node is already activated (e.g., it is an input node) then return, since the signal is already set
    if fmm.activated[currentNode] {
        fmm.inActivation[currentNode] = false
        return true, nil
    }
    // Mark that the node is currently being calculated
    fmm.inActivation[currentNode] = true

    // Set the pre-signal to 0
    fmm.neuronSignalsBeingProcessed[currentNode] = 0

    // The reverse adjacency list holds incoming connections, go through each one and activate it
    for i := 0; i < len(fmm.reverseAdjacentList[currentNode]); i++ {
        currentAdjNode := fmm.reverseAdjacentList[currentNode][i]

        // If this node is currently being activated then we have reached a cycle, or recurrent connection.
        // Use the previous activation in this case
        if fmm.inActivation[currentAdjNode] {
            fmm.neuronSignalsBeingProcessed[currentNode] += fmm.lastActivation[currentAdjNode] * fmm.adjacentMatrix[currentAdjNode][currentNode]
        } else {
            // Otherwise proceed as normal
            // Recurse if this neuron has not been activated yet
            if !fmm.activated[currentAdjNode] {
                res, err = fmm.recursiveActivateNode(currentAdjNode)
                if err != nil {
                    // recursive activation failed
                    return false, err
                } else if !res {
                    return false, fmt.Errorf("failed to recursively activate neuron at %d", currentAdjNode)
                }
            }

            // Add it to the new activation
            fmm.neuronSignalsBeingProcessed[currentNode] += fmm.neuronSignals[currentAdjNode] * fmm.adjacentMatrix[currentAdjNode][currentNode]
        }
    }

    // Mark this neuron as completed
    fmm.activated[currentNode] = true

    // This is no longer being calculated (for cycle detection)
    fmm.inActivation[currentNode] = false

    // Set this signal after running it through the activation function
    if fmm.neuronSignals[currentNode], err = utils.NodeActivators.ActivateByType(
        fmm.neuronSignalsBeingProcessed[currentNode], nil,
        fmm.activationFunctions[currentNode]); err != nil {
        // failed to activate
        res = false
    } else {
        res = true
    }
    return res, err
}

// Relax attempts to relax the network over the given number of steps before giving up. The network is considered
// relaxed when the absolute value of the change at every neuron is less than maxAllowedSignalDelta while the
// activation waves propagate. If maxAllowedSignalDelta is less than or equal to 0, the method returns true
// without checking for relaxation.
func (fmm *FastModularNetworkSolver) Relax(maxSteps int, maxAllowedSignalDelta float64) (relaxed bool, err error) {
    for i := 0; i < maxSteps; i++ {
        if relaxed, err = fmm.forwardStep(maxAllowedSignalDelta); err != nil {
            return false, err
        } else if relaxed {
            break // no need to iterate any further, already reached desired accuracy
        }
    }
    return relaxed, nil
}
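
// Usage sketch (assumption, illustrative only): relaxing the network until its signals stabilize,
// which is useful for recurrent topologies where the required number of forward steps is hard to
// determine. The step limit and the signal delta below are arbitrary values chosen for
// illustration; exampleBuildMinimalSolver is the hypothetical helper sketched above.
func exampleRelaxation(inputs []float64) ([]float64, error) {
    solver := exampleBuildMinimalSolver()
    if err := solver.LoadSensors(inputs); err != nil {
        return nil, err
    }
    // Propagate until no neuron signal changes by more than 0.001 per step,
    // giving up after at most 20 forward steps.
    if relaxed, err := solver.Relax(20, 0.001); err != nil {
        return nil, err
    } else if !relaxed {
        return nil, errors.New("network failed to relax within the allowed number of steps")
    }
    return solver.ReadOutputs(), nil
}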

// forwardStep performs a single forward step through the network and tests whether the network has become relaxed.
// The network is considered relaxed when the absolute value of the change at every neuron is less than
// maxAllowedSignalDelta while the activation waves propagate.
func (fmm *FastModularNetworkSolver) forwardStep(maxAllowedSignalDelta float64) (isRelaxed bool, err error) {
    isRelaxed = true

    // Calculate output signal per each connection and add the signals to the target neurons
    for _, conn := range fmm.connections {
        fmm.neuronSignalsBeingProcessed[conn.TargetIndex] += fmm.neuronSignals[conn.SourceIndex] * conn.Weight
    }

    // Pass the signals through the single-valued activation functions
    for i := fmm.sensorNeuronCount; i < fmm.totalNeuronCount; i++ {
        signal := fmm.neuronSignalsBeingProcessed[i]
        if fmm.biasNeuronCount > 0 {
            // append BIAS value to the signal if appropriate
            signal += fmm.biasList[i]
        }

        if fmm.neuronSignalsBeingProcessed[i], err = utils.NodeActivators.ActivateByType(
            signal, nil, fmm.activationFunctions[i]); err != nil {
            return false, err
        }
    }

    // Pass the signals through each module (activation function with more than one input or output)
    for _, module := range fmm.modules {
        inputs := make([]float64, len(module.InputIndexes))
        for i, inIndex := range module.InputIndexes {
            inputs[i] = fmm.neuronSignalsBeingProcessed[inIndex]
        }
        if outputs, err := utils.NodeActivators.ActivateModuleByType(inputs, nil, module.ActivationType); err == nil {
            // save outputs
            for i, outIndex := range module.OutputIndexes {
                fmm.neuronSignalsBeingProcessed[outIndex] = outputs[i]
            }
        } else {
            return false, err
        }
    }

    // Move all the neuron signals we changed while processing this network activation into storage.
    if maxAllowedSignalDelta <= 0 {
        // iterate through output and hidden neurons and collect activations
        for i := fmm.sensorNeuronCount; i < fmm.totalNeuronCount; i++ {
            fmm.neuronSignals[i] = fmm.neuronSignalsBeingProcessed[i]
            fmm.neuronSignalsBeingProcessed[i] = 0
        }
    } else {
        for i := fmm.sensorNeuronCount; i < fmm.totalNeuronCount; i++ {
            // First check whether any location in the network has changed by more than a small amount.
            isRelaxed = isRelaxed && !(math.Abs(fmm.neuronSignals[i]-fmm.neuronSignalsBeingProcessed[i]) > maxAllowedSignalDelta)

            fmm.neuronSignals[i] = fmm.neuronSignalsBeingProcessed[i]
            fmm.neuronSignalsBeingProcessed[i] = 0
        }
    }

    return isRelaxed, err
}

// Flush flushes the network state by removing all current activations. Returns true if the network was flushed
// successfully, or false in case of an error.
func (fmm *FastModularNetworkSolver) Flush() (bool, error) {
    for i := fmm.biasNeuronCount; i < fmm.totalNeuronCount; i++ {
        fmm.neuronSignals[i] = 0.0
    }
    return true, nil
}

// LoadSensors sets the sensor values on the input nodes of the network
func (fmm *FastModularNetworkSolver) LoadSensors(inputs []float64) error {
    if len(inputs) == fmm.inputNeuronCount {
        // only inputs should be provided
        for i := 0; i < fmm.inputNeuronCount; i++ {
            fmm.neuronSignals[fmm.biasNeuronCount+i] = inputs[i]
        }
    } else {
        return NetErrUnsupportedSensorsArraySize
    }
    return nil
}

// ReadOutputs reads the output values from the output nodes of the network
func (fmm *FastModularNetworkSolver) ReadOutputs() []float64 {
    return fmm.neuronSignals[fmm.sensorNeuronCount : fmm.sensorNeuronCount+fmm.outputNeuronCount]
}

// NodeCount returns the total number of neural units in the network
func (fmm *FastModularNetworkSolver) NodeCount() int {
    return fmm.totalNeuronCount + len(fmm.modules)
}

// LinkCount returns the total number of links between nodes in the network
func (fmm *FastModularNetworkSolver) LinkCount() int {
    // count all connections
    numLinks := len(fmm.connections)

    // count all bias links if any
    if fmm.biasNeuronCount > 0 {
        for _, b := range fmm.biasList {
            if b != 0 {
                numLinks++
            }
        }
    }

    // count all modules links
    if len(fmm.modules) != 0 {
        for _, module := range fmm.modules {
            numLinks += len(module.InputIndexes) + len(module.OutputIndexes)
        }
    }
    return numLinks
}

// String implements the fmt.Stringer interface
func (fmm *FastModularNetworkSolver) String() string {
    str := fmt.Sprintf("FastModularNetwork, id: %d, name: [%s], neurons: %d,\n\tinputs: %d,\tbias: %d,\toutputs:%d,\t hidden: %d",
        fmm.Id, fmm.Name, fmm.totalNeuronCount, fmm.inputNeuronCount, fmm.biasNeuronCount, fmm.outputNeuronCount,
        fmm.totalNeuronCount-fmm.sensorNeuronCount-fmm.outputNeuronCount)
    return str
}
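
// Usage sketch (assumption, illustrative only): the recursive activation cycle, which is the
// preferred path for networks without modules. Flush clears the activations left over from a
// previous input pattern before the next one is presented; recurrent links, if any, are resolved
// through the stored previous activations. exampleBuildMinimalSolver is the hypothetical helper
// sketched earlier in this file.
func exampleRecursiveActivation(inputs []float64) ([]float64, error) {
    solver := exampleBuildMinimalSolver()
    // Remove activations accumulated by any previous activation cycle
    if _, err := solver.Flush(); err != nil {
        return nil, err
    }
    if err := solver.LoadSensors(inputs); err != nil {
        return nil, err
    }
    // Recursively activate every output neuron from its incoming connections
    if ok, err := solver.RecursiveSteps(); err != nil {
        return nil, err
    } else if !ok {
        return nil, errors.New("activation wave did not reach the network outputs")
    }
    return solver.ReadOutputs(), nil
}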