github.com/yaricom/goNEAT@v0.0.0-20210507221059-e2110b885482/neat/network/network.go

package network

import (
	"bytes"
	"errors"
	"fmt"
	"github.com/yaricom/goNEAT/neat/utils"
)

// Network is a list of input nodes and a list of output nodes.
// The point of the network is to define a single entity which can evolve
// or learn on its own, even though it may be part of a larger framework.
type Network struct {
	// A network id
	Id int
	// A name of this network
	Name string
	// NNodes that output from the network
	Outputs []*NNode

	// The number of links in the net (-1 means not yet counted)
	numLinks int
	// A list of all the nodes in the network except MIMO control ones
	allNodes []*NNode
	// NNodes that input into the network
	inputs []*NNode
	// NNodes that connect network modules
	controlNodes []*NNode
}

// NewNetwork creates a new network
func NewNetwork(in, out, all []*NNode, netId int) *Network {
	n := Network{
		Id:       netId,
		inputs:   in,
		Outputs:  out,
		allNodes: all,
		numLinks: -1,
	}
	return &n
}

// NewModularNetwork creates a new modular network with control nodes
func NewModularNetwork(in, out, all, control []*NNode, netId int) *Network {
	n := NewNetwork(in, out, all, netId)
	n.controlNodes = control
	return n
}
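
// exampleBuildNetwork is an illustrative sketch, not part of the original API, showing how
// NewNetwork is typically assembled from nodes and links. For brevity it builds NNode and
// Link values directly with struct literals, relying only on the fields already referenced
// elsewhere in this file (Id, NeuronType, Incoming, Weight, InNode, OutNode); real code
// would normally use the package's node and link constructors instead. The node ids and
// weights are arbitrary.
func exampleBuildNetwork() *Network {
	// one bias, one input and one output node with hypothetical ids
	bias := &NNode{Id: 1, NeuronType: BiasNeuron}
	input := &NNode{Id: 2, NeuronType: InputNeuron}
	output := &NNode{Id: 3, NeuronType: OutputNeuron}

	// wire bias -> output and input -> output with arbitrary weights
	output.Incoming = append(output.Incoming,
		&Link{Weight: 0.3, InNode: bias, OutNode: output},
		&Link{Weight: 0.7, InNode: input, OutNode: output})

	// the all-nodes list contains every node; the inputs list includes the bias node
	return NewNetwork([]*NNode{bias, input}, []*NNode{output},
		[]*NNode{bias, input, output}, 1)
}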

// FastNetworkSolver creates a fast network solver based on the architecture of this network.
// It is primarily aimed at big networks to improve processing speed.
func (n *Network) FastNetworkSolver() (NetworkSolver, error) {
	// calculate neurons per layer
	outputNeuronCount := len(n.Outputs)
	// build bias, input and hidden neurons lists
	biasNeuronCount := 0
	inList := make([]*NNode, 0)
	biasList := make([]*NNode, 0)
	hiddenList := make([]*NNode, 0)
	for _, ne := range n.allNodes {
		switch ne.NeuronType {
		case BiasNeuron:
			biasNeuronCount += 1
			biasList = append(biasList, ne)
		case InputNeuron:
			inList = append(inList, ne)
		case HiddenNeuron:
			hiddenList = append(hiddenList, ne)
		}
	}
	inputNeuronCount := len(inList)
	totalNeuronCount := len(n.allNodes)

	// create activation functions array
	activations := make([]utils.NodeActivationType, totalNeuronCount)
	neuronLookup := make(map[int]int) // id:index
	neuronIndex := 0

	// walk through neuron nodes in order: bias, input, output, hidden
	neuronIndex = processList(neuronIndex, biasList, activations, neuronLookup)
	neuronIndex = processList(neuronIndex, inList, activations, neuronLookup)
	neuronIndex = processList(neuronIndex, n.Outputs, activations, neuronLookup)
	neuronIndex = processList(neuronIndex, hiddenList, activations, neuronLookup)

	// walk through neurons in order: input, hidden, output and create bias and connections lists
	biases := make([]float64, totalNeuronCount)
	connections := make([]*FastNetworkLink, 0)

	if inConnects, err := processIncomingConnections(inList, biases, neuronLookup); err == nil {
		connections = append(connections, inConnects...)
	} else {
		return nil, err
	}
	if inConnects, err := processIncomingConnections(hiddenList, biases, neuronLookup); err == nil {
		connections = append(connections, inConnects...)
	} else {
		return nil, err
	}
	if inConnects, err := processIncomingConnections(n.Outputs, biases, neuronLookup); err == nil {
		connections = append(connections, inConnects...)
	} else {
		return nil, err
	}

	// walk through control neurons
	modules := make([]*FastControlNode, len(n.controlNodes))
	for i, cn := range n.controlNodes {
		// collect inputs
		inputs := make([]int, len(cn.Incoming))
		for j, in := range cn.Incoming {
			if inIndex, ok := neuronLookup[in.InNode.Id]; ok {
				inputs[j] = inIndex
			} else {
				return nil, fmt.Errorf(
					"failed to lookup input neuron with id: %d at control neuron: %d",
					in.InNode.Id, cn.Id)
			}
		}
		// collect outputs
		outputs := make([]int, len(cn.Outgoing))
		for j, out := range cn.Outgoing {
			if outIndex, ok := neuronLookup[out.OutNode.Id]; ok {
				outputs[j] = outIndex
			} else {
				return nil, fmt.Errorf(
					"failed to lookup output neuron with id: %d at control neuron: %d",
					out.OutNode.Id, cn.Id)
			}
		}
		// build control node
		modules[i] = &FastControlNode{InputIndexes: inputs, OutputIndexes: outputs, ActivationType: cn.ActivationType}
	}

	return NewFastModularNetworkSolver(biasNeuronCount, inputNeuronCount, outputNeuronCount, totalNeuronCount,
		activations, connections, biases, modules), nil
}
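
// exampleFastSolver is an illustrative sketch, not part of the original API, of how a network
// can be converted into a fast solver and then evaluated. It assumes that the NetworkSolver
// interface exposes LoadSensors, ForwardSteps and ReadOutputs mirroring the methods of the
// same names defined on Network in this file; adjust the calls if the interface differs.
func exampleFastSolver(n *Network, sensors []float64, steps int) ([]float64, error) {
	// build the fast solver from this network's architecture
	solver, err := n.FastNetworkSolver()
	if err != nil {
		return nil, err
	}
	// load sensor values and propagate the activation wave forward
	if err = solver.LoadSensors(sensors); err != nil {
		return nil, err
	}
	if _, err = solver.ForwardSteps(steps); err != nil {
		return nil, err
	}
	// read activations of the output neurons
	return solver.ReadOutputs(), nil
}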

// processList walks the given nodes, assigns them successive indexes starting from startIndex,
// records their activation types and fills the id-to-index lookup map.
func processList(startIndex int, nList []*NNode, activations []utils.NodeActivationType, neuronLookup map[int]int) int {
	for _, ne := range nList {
		activations[startIndex] = ne.ActivationType
		neuronLookup[ne.Id] = startIndex
		startIndex += 1
	}
	return startIndex
}

// processIncomingConnections converts the incoming links of the given nodes into fast network
// links using the id-to-index lookup map; weights of links coming from BIAS neurons are
// accumulated into the biases array instead of being stored as connections.
func processIncomingConnections(nList []*NNode, biases []float64, neuronLookup map[int]int) (connections []*FastNetworkLink, err error) {
	connections = make([]*FastNetworkLink, 0)
	for _, ne := range nList {
		if targetIndex, ok := neuronLookup[ne.Id]; ok {
			for _, in := range ne.Incoming {
				if sourceIndex, ok := neuronLookup[in.InNode.Id]; ok {
					if in.InNode.NeuronType == BiasNeuron {
						// store bias for target neuron
						biases[targetIndex] += in.Weight
					} else {
						// save connection
						conn := FastNetworkLink{
							SourceIndex: sourceIndex,
							TargetIndex: targetIndex,
							Weight:      in.Weight,
						}
						connections = append(connections, &conn)
					}
				} else {
					err = fmt.Errorf("failed to lookup source neuron with id: %d", in.InNode.Id)
					break
				}
			}
		} else {
			err = fmt.Errorf("failed to lookup target neuron with id: %d", ne.Id)
			break
		}
	}
	if err != nil {
		return nil, err
	}
	return connections, nil
}

// Flush puts the network back into an initial state
func (n *Network) Flush() (res bool, err error) {
	res = true
	// Flush back recursively
	for _, node := range n.allNodes {
		node.Flushback()
		err = node.FlushbackCheck()
		if err != nil {
			// failed - no need to continue
			res = false
			break
		}
	}
	return res, err
}

// PrintActivation returns a string representation of the network output nodes and their activation values
func (n *Network) PrintActivation() string {
	out := bytes.NewBufferString(fmt.Sprintf("Network %s with id %d outputs: (", n.Name, n.Id))
	for i, node := range n.Outputs {
		_, _ = fmt.Fprintf(out, "[Output #%d: %s] ", i, node)
	}
	_, _ = fmt.Fprint(out, ")")
	return out.String()
}

// PrintInput returns a string representation of the network input nodes and their values
func (n *Network) PrintInput() string {
	out := bytes.NewBufferString(fmt.Sprintf("Network %s with id %d inputs: (", n.Name, n.Id))
	for i, node := range n.inputs {
		_, _ = fmt.Fprintf(out, "[Input #%d: %s] ", i, node)
	}
	_, _ = fmt.Fprint(out, ")")
	return out.String()
}

// OutputIsOff returns true if at least one output node is not active
func (n *Network) OutputIsOff() bool {
	for _, node := range n.Outputs {
		if node.ActivationsCount == 0 {
			return true
		}
	}
	return false
}

// ActivateSteps attempts to activate the network within the given number of steps, returning an error on failure.
func (n *Network) ActivateSteps(maxSteps int) (bool, error) {
	// For adding to the active sum
	addAmount := 0.0
	// Make sure we at least activate once
	oneTime := false
	// Used in case the output is somehow truncated from the network
	abortCount := 0

	// Keep activating until all the outputs have become active
	// (This only happens on the first activation, because after that they are always active)
	for n.OutputIsOff() || !oneTime {

		if abortCount >= maxSteps {
			return false, NetErrExceededMaxActivationAttempts
		}

		// For each neuron node, compute the sum of its incoming activation
		for _, np := range n.allNodes {
			if np.IsNeuron() {
				np.ActivationSum = 0.0 // reset activation value

				// For each node's incoming connection, add the activity from the connection to the activesum
				for _, link := range np.Incoming {
					// Handle possible time delays
					if !link.IsTimeDelayed {
						addAmount = link.Weight * link.InNode.GetActiveOut()
						if link.InNode.isActive || link.InNode.IsSensor() {
							np.isActive = true
						}
					} else {
						addAmount = link.Weight * link.InNode.GetActiveOutTd()
					}
					np.ActivationSum += addAmount
				} // End {for} over incoming links
			} // End if != SENSOR
		} // End {for} over all nodes

		// Now activate all the neuron nodes off their incoming activation
		for _, np := range n.allNodes {
			if np.IsNeuron() {
				// Only activate if some active input came in
				if np.isActive {
					// Now run the net activation through an activation function
					err := ActivateNode(np, utils.NodeActivators)
					if err != nil {
						return false, err
					}
				}
			}
		}

		// Now activate all MIMO control genes to propagate activation through genome modules
		for _, cn := range n.controlNodes {
			cn.isActive = false
			// Activate control MIMO node as control module
			err := ActivateModule(cn, utils.NodeActivators)
			if err != nil {
				return false, err
			}
			// mark control node as active
			cn.isActive = true
		}

		oneTime = true
		abortCount += 1
	}
	return true, nil
}

// Activate activates the net such that all outputs become active
func (n *Network) Activate() (bool, error) {
	return n.ActivateSteps(20)
}
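
// exampleEvaluate is an illustrative sketch, not part of the original API, of the typical
// activation cycle: flush any state left from a previous evaluation, load the sensor values,
// activate the network until all outputs are active, and read the resulting output
// activations. All methods used here are defined in this file.
func exampleEvaluate(n *Network, sensors []float64) ([]float64, error) {
	// reset activation state remaining from a previous input
	if _, err := n.Flush(); err != nil {
		return nil, err
	}
	// load sensor values into the input (and bias) nodes
	if err := n.LoadSensors(sensors); err != nil {
		return nil, err
	}
	// activate with the default limit of activation steps
	if _, err := n.Activate(); err != nil {
		return nil, err
	}
	// collect activations of the output nodes
	return n.ReadOutputs(), nil
}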

// ForwardSteps propagates an activation wave through all network nodes for the provided number of steps in the
// forward direction. Returns true if the activation wave passed from all inputs to the outputs.
func (n *Network) ForwardSteps(steps int) (res bool, err error) {
	for i := 0; i < steps; i++ {
		res, err = n.Activate()
		if err != nil {
			// failure - no need to continue
			break
		}
	}
	return res, err
}

// RecursiveSteps propagates an activation wave through all network nodes by recursion from the output nodes.
// Returns true if the activation wave passed from all inputs to the outputs. Not implemented yet.
func (n *Network) RecursiveSteps() (bool, error) {
	return false, errors.New("RecursiveSteps is not implemented")
}

// Relax attempts to relax the network for the given number of steps until it gives up. The network is considered
// relaxed when the absolute value of the change at any given point is less than maxAllowedSignalDelta during
// activation wave propagation. If maxAllowedSignalDelta is less than or equal to zero, the method returns true
// without checking for relaxation. Not implemented yet.
func (n *Network) Relax(_ int, _ float64) (bool, error) {
	return false, errors.New("Relax is not implemented")
}

// LoadSensors takes an array of sensor values and loads it into SENSOR inputs ONLY
func (n *Network) LoadSensors(sensors []float64) error {
	counter := 0
	if len(sensors) == len(n.inputs) {
		// BIAS value provided as input
		for _, node := range n.inputs {
			if node.IsSensor() {
				node.SensorLoad(sensors[counter])
				counter += 1
			}
		}
	} else {
		// use default BIAS value
		for _, node := range n.inputs {
			if node.NeuronType == InputNeuron {
				node.SensorLoad(sensors[counter])
				counter += 1
			} else {
				node.SensorLoad(1.0) // default BIAS value
			}
		}
	}

	return nil
}

// ReadOutputs reads output values from the output nodes of the network
func (n *Network) ReadOutputs() []float64 {
	outs := make([]float64, len(n.Outputs))
	for i, o := range n.Outputs {
		outs[i] = o.Activation
	}
	return outs
}

// NodeCount counts the number of nodes in the net
func (n *Network) NodeCount() int {
	return len(n.allNodes) + len(n.controlNodes)
}

// LinkCount counts the number of links in the net
func (n *Network) LinkCount() int {
	n.numLinks = 0
	for _, node := range n.allNodes {
		n.numLinks += len(node.Incoming)
	}
	for _, node := range n.controlNodes {
		n.numLinks += len(node.Incoming)
		n.numLinks += len(node.Outgoing)
	}
	return n.numLinks
}

// Complexity returns the complexity of this network, which is the sum of the node count and the link count
func (n *Network) Complexity() int {
	return n.NodeCount() + n.LinkCount()
}

// IsRecurrent checks a POTENTIAL link between a potential inNode and a potential outNode to see if it must be
// recurrent. Use count and thresh to jump out in the case of an infinite loop.
func (n *Network) IsRecurrent(inNode, outNode *NNode, count *int, thresh int) bool {
	// Count the node as visited
	*count++

	if *count > thresh {
		return false // Short out the whole thing - loop detected
	}

	if inNode == outNode {
		return true
	} else {
		// Check back on all links ...
		for _, link := range inNode.Incoming {
			// But skip links that are already recurrent -
			// We want to check back through the forward flow of signals only
			if !link.IsRecurrent {
				if n.IsRecurrent(link.InNode, outNode, count, thresh) {
					return true
				}
			}
		}
	}
	return false
}
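
// exampleIsRecurrent is an illustrative sketch, not part of the original API, showing the
// count/thresh idiom expected by IsRecurrent: the caller supplies a zeroed counter shared
// across the recursion and a visit threshold used to bail out of loops. The squared node
// count is used here as an assumed, reasonable threshold; it is not prescribed by this file.
func exampleIsRecurrent(n *Network, in, out *NNode) bool {
	count := 0 // visited nodes counter, incremented inside IsRecurrent
	thresh := n.NodeCount() * n.NodeCount()
	return n.IsRecurrent(in, out, &count, thresh)
}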

// MaxDepth finds the maximum number of neurons between an output and an input
func (n *Network) MaxDepth() (int, error) {
	if len(n.controlNodes) > 0 {
		return -1, errors.New("unsupported for modular networks")
	}
	// The quick case when there are no hidden nodes
	if len(n.allNodes) == len(n.inputs)+len(n.Outputs) {
		return 1, nil // just one layer depth
	}

	max := 0 // The max depth
	for _, node := range n.Outputs {
		currDepth, err := node.Depth(0)
		if err != nil {
			return currDepth, err
		}
		if currDepth > max {
			max = currDepth
		}
	}

	return max, nil
}
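
// exampleDepthSteps is an illustrative sketch, not part of the original API, of one common
// use of MaxDepth: for a feed-forward network the depth gives a sensible number of forward
// activation steps for a signal to travel from the inputs to the outputs, so it can be fed
// into ForwardSteps. This pairing is an assumption of the sketch, not prescribed by this
// file; modular networks are reported as unsupported by MaxDepth above.
func exampleDepthSteps(n *Network) (bool, error) {
	depth, err := n.MaxDepth()
	if err != nil {
		return false, err
	}
	// propagate the activation wave once per layer of depth
	return n.ForwardSteps(depth)
}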

// AllNodes returns all nodes in the network
func (n *Network) AllNodes() []*NNode {
	return n.allNodes
}