/// <summary>
/// Builds the per-neuron activation function and auxiliary-argument arrays
/// for the phenome, remapping genome node order to phenome order through the
/// oldToNewIndex dictionary. It then records the three representative
/// activation functions: one for output neurons, one for regulatory neurons
/// and one for normal (local-output) neurons.
/// </summary>
private static void GetActivationFunctions()
{
    INodeList nodes = genome.NodeList;
    int count = nodes.Count;
    IActivationFunctionLibrary fnLibrary = genome.ActivationFnLibrary;

    phenomeVariables.neuronActivationFnArray = new IActivationFunction[count];
    phenomeVariables.neuronAuxArgsArray = new double[count][];

    // Copy each node's function and aux arguments into its phenome slot,
    // translating the genome index through oldToNewIndex.
    for (int idx = 0; idx < count; idx++)
    {
        int phenomeIdx = oldToNewIndex[idx];
        phenomeVariables.neuronActivationFnArray[phenomeIdx] = fnLibrary.GetFunction(nodes[idx].ActivationFnId);
        phenomeVariables.neuronAuxArgsArray[phenomeIdx] = nodes[idx].AuxState;
    }

    // Capture the three basic activation function types, scanning past the
    // bias/input nodes. The scan ends at the first local-output node.
    for (int idx = phenomeVariables.inputBiasCount; idx < count; idx++)
    {
        NodeType nodeType = nodes[idx].NodeType;
        if (nodeType == NodeType.Output)
        {
            phenomeVariables.outputNeuronActivFn = fnLibrary.GetFunction(nodes[idx].ActivationFnId);
        }
        else if (nodeType == NodeType.Regulatory)
        {
            phenomeVariables.regulatoryActivFn = fnLibrary.GetFunction(nodes[idx].ActivationFnId);
        }
        else if (nodeType == NodeType.Local_Output)
        {
            phenomeVariables.normalNeuronActivFn = fnLibrary.GetFunction(nodes[idx].ActivationFnId);
            break;
        }
    }
}
/// <summary>
/// Decodes a network definition into phenome neuron and connection lists.
/// Neurons are created first and indexed by innovation ID so that the
/// connection endpoints can be resolved in a second pass.
/// </summary>
private static void InternalDecode(INetworkDefinition networkDef, out List <Neuron> neuronList, out List <Connection> connectionList)
{
    // Phase 1: translate node genes into phenome neurons.
    INodeList nodeDefs = networkDef.NodeList;
    int nodeTotal = nodeDefs.Count;
    IActivationFunctionLibrary fnLib = networkDef.ActivationFnLibrary;

    neuronList = new List <Neuron>(nodeTotal);

    // Neurons keyed on innovation ID - needed to wire up connections below.
    var neuronById = new Dictionary <uint, Neuron>(nodeTotal);

    for (int idx = 0; idx < nodeTotal; idx++)
    {
        INetworkNode def = nodeDefs[idx];
        // NOTE(review): the node's NodeType is passed straight through to the
        // Neuron constructor; any genome/phenome enum separation happens
        // elsewhere, if at all - confirm against the Neuron type.
        Neuron neuron = new Neuron(def.Id, def.NodeType, fnLib.GetFunction(def.ActivationFnId), def.AuxState);
        neuronList.Add(neuron);
        neuronById.Add(def.Id, neuron);
    }

    // Phase 2: translate connection genes, resolving both endpoints through
    // the innovation-ID map built above.
    IConnectionList connDefs = networkDef.ConnectionList;
    int connTotal = connDefs.Count;
    connectionList = new List <Connection>(connTotal);

    for (int idx = 0; idx < connTotal; idx++)
    {
        INetworkConnection connDef = connDefs[idx];
        connectionList.Add(
            new Connection(neuronById[connDef.SourceNodeId],
                           neuronById[connDef.TargetNodeId],
                           connDef.Weight));
    }
}
/// <summary>
/// Decodes a network definition into the flat arrays used by the fast
/// (array-based) network implementation: the connection array, the per-node
/// activation functions and the per-node auxiliary arguments.
/// </summary>
private static void InternalDecode(INetworkDefinition networkDef,
                                   int timestepsPerActivation,
                                   out FastConnection[] fastConnectionArray,
                                   out IActivationFunction[] activationFnArray,
                                   out double[][] neuronAuxArgsArray)
{
    // Build the connectivity representation first.
    fastConnectionArray = CreateFastConnectionArray(networkDef);

    // TODO: Test/optimize heuristic - this is just back of envelope maths.
    // Sorting by source neuron index is expected to make each activation
    // loop roughly 2x faster (unconfirmed), at a one-off cost of about
    // n*log2(n) comparison operations. Whether sorting pays off therefore
    // depends on the connection count and timestepsPerActivation. Small
    // networks fit in CPU caches and may not actually speed up, but the
    // unsorted accesses would still 'scramble' the caches for other
    // threads, so the decision rule is kept simple.
    double connCount = fastConnectionArray.Length;
    double steps = timestepsPerActivation;
    if ((connCount > 2) && (((connCount * Math.Log(connCount, 2)) + ((steps * connCount) / 2.0)) < (steps * connCount)))
    {
        // Order connections by source neuron index; a plain subtraction is a
        // fast comparison for the (non-negative) index values involved.
        Array.Sort(fastConnectionArray, (FastConnection x, FastConnection y) => x._srcNeuronIdx - y._srcNeuronIdx);
    }

    // Build the per-node activation function and aux-argument arrays.
    // NOTE(review): every node gets an entry here, including bias and input
    // nodes, even though those are never activated - confirm whether the
    // consumer relies on those slots being populated.
    INodeList nodes = networkDef.NodeList;
    int nodeTotal = nodes.Count;
    IActivationFunctionLibrary fnLib = networkDef.ActivationFnLibrary;
    activationFnArray = new IActivationFunction[nodeTotal];
    neuronAuxArgsArray = new double[nodeTotal][];

    for (int idx = 0; idx < nodeTotal; idx++)
    {
        activationFnArray[idx] = fnLib.GetFunction(nodes[idx].ActivationFnId);
        neuronAuxArgsArray[idx] = nodes[idx].AuxState;
    }
}
/// <summary>
/// Looks up the gene's activation function in the function library and
/// returns its Markov state string (the function is cast to
/// IMarkovActivationFunction, which is assumed to expose State).
/// </summary>
private string getState(NeuronGene neuronGene)
{
    var fn = _fnLib.GetFunction(neuronGene.ActivationFnId);
    return ((IMarkovActivationFunction)fn).State;
}
/// <summary>
/// Creates a FastAcyclicNetwork from an INetworkDefinition. Nodes are
/// reordered by network depth so that activation can proceed layer by
/// layer; connections are likewise sorted so each layer's connections form
/// a contiguous run.
/// </summary>
public static FastAcyclicNetwork CreateFastAcyclicNetwork(INetworkDefinition networkDef)
{
    // Acyclic decoding is only valid for acyclic networks.
    Debug.Assert(!CyclicNetworkTest.IsNetworkCyclic(networkDef), "Attempt to decode a cyclic network into a FastAcyclicNetwork.");

    // Determine the depth of each node in the network.
    // Node depths are used to separate the nodes into depth based layers, these layers can then be
    // used to determine the order in which signals are propagated through the network.
    AcyclicNetworkDepthAnalysis depthAnalysis = new AcyclicNetworkDepthAnalysis();
    NetworkDepthInfo netDepthInfo = depthAnalysis.CalculateNodeDepths(networkDef);

    // Construct an array of NodeInfo (node ID, original definition index and
    // depth), to be ordered by node depth below.
    int[] nodeDepthArr = netDepthInfo._nodeDepthArr;
    INodeList nodeList = networkDef.NodeList;
    int nodeCount = nodeList.Count;
    NodeInfo[] nodeInfoByDepth = new NodeInfo[nodeCount];
    for (int i = 0; i < nodeCount; i++)
    {
        nodeInfoByDepth[i]._nodeId = nodeList[i].Id;
        nodeInfoByDepth[i]._definitionIdx = i;
        nodeInfoByDepth[i]._nodeDepth = nodeDepthArr[i];
    }

    // Sort NodeInfo array by depth.
    // We use an IComparer here because an anonymous method is not accepted on the method overload that accepts
    // a sort range, which we use to avoid sorting the input and bias nodes. Sort() performs an unstable sort therefore
    // we must restrict the range of the sort to ensure the input and bias node indexes are unchanged. Restricting the
    // sort to the required range is also more efficient (less items to sort).
    int inputAndBiasCount = networkDef.InputNodeCount + 1;
    Array.Sort(nodeInfoByDepth, inputAndBiasCount, nodeCount - inputAndBiasCount, NodeDepthComparer.__NodeDepthComparer);

    // Array of live node indexes indexed by their index in the original network definition. This allows us to
    // locate the position of input and output nodes in their new positions in the live network data structures.
    int[] newIdxByDefinitionIdx = new int[nodeCount];

    // Dictionary of live node indexes keyed by node ID. This allows us to convert the network definition connection
    // endpoints from node IDs to indexes into the live/runtime network data structures.
    Dictionary <uint, int> newIdxById = new Dictionary <uint, int>(nodeCount);

    // Populate both the lookup array and dictionary.
    for (int i = 0; i < nodeCount; i++)
    {
        NodeInfo nodeInfo = nodeInfoByDepth[i];
        newIdxByDefinitionIdx[nodeInfo._definitionIdx] = i;
        newIdxById.Add(nodeInfo._nodeId, i);
    }

    // Make a copy of the sub-range of newIdxByDefinitionIdx that represents the output nodes.
    // Note. 'inputAndBiasCount' holds the index of the first output node in the definition ordering.
    int outputCount = networkDef.OutputNodeCount;
    int[] outputNeuronIdxArr = new int[outputCount];
    Array.Copy(newIdxByDefinitionIdx, inputAndBiasCount, outputNeuronIdxArr, 0, outputCount);

    // Construct arrays with additional 'per node' data/refs (activation functions, activation fn auxiliary data),
    // ordered by the new (depth-sorted) node index.
    IActivationFunctionLibrary activationFnLibrary = networkDef.ActivationFnLibrary;
    IActivationFunction[] nodeActivationFnArr = new IActivationFunction[nodeCount];
    double[][] nodeAuxArgsArray = new double[nodeCount][];
    for (int i = 0; i < nodeCount; i++)
    {
        int definitionIdx = nodeInfoByDepth[i]._definitionIdx;
        nodeActivationFnArr[i] = activationFnLibrary.GetFunction(nodeList[definitionIdx].ActivationFnId);
        nodeAuxArgsArray[i] = nodeList[definitionIdx].AuxState;
    }

    //=== Create array of FastConnection(s).
    // Loop the connections and lookup the node IDs for each connection's end points using newIdxById.
    IConnectionList connectionList = networkDef.ConnectionList;
    int connectionCount = connectionList.Count;
    FastConnection[] fastConnectionArray = new FastConnection[connectionCount];
    for (int i = 0; i < connectionCount; i++)
    {
        INetworkConnection conn = connectionList[i];
        fastConnectionArray[i]._srcNeuronIdx = newIdxById[conn.SourceNodeId];
        fastConnectionArray[i]._tgtNeuronIdx = newIdxById[conn.TargetNodeId];
        fastConnectionArray[i]._weight = conn.Weight;
    }

    // Sort fastConnectionArray by source node index. This allows us to activate the connections in the
    // order they are present within the network (by depth). We also secondary sort by target index to
    // improve CPU cache coherency of the data (in order accesses that are as close to each other as possible).
    Array.Sort(fastConnectionArray, delegate(FastConnection x, FastConnection y)
    {
        if (x._srcNeuronIdx < y._srcNeuronIdx)
        {
            return(-1);
        }
        if (x._srcNeuronIdx > y._srcNeuronIdx)
        {
            return(1);
        }
        // Secondary sort on target index.
        if (x._tgtNeuronIdx < y._tgtNeuronIdx)
        {
            return(-1);
        }
        if (x._tgtNeuronIdx > y._tgtNeuronIdx)
        {
            return(1);
        }
        // Connections are equal (this should not actually happen).
        return(0);
    });

    // Create an array of LayerInfo(s). Each LayerInfo contains the index + 1 of both the last node and last
    // connection in that layer.
    // The array is in order of depth, from layer zero (bias and inputs nodes) to the last layer
    // (usually output nodes, but not necessarily if there is a dead end pathway with a high number of hops).
    // Note. There is guaranteed to be at least one connection with a source at a given depth level, this is
    // because for there to be a layer N there must necessarily be a connection from a node in layer N-1
    // to a node in layer N.
    int netDepth = netDepthInfo._networkDepth;
    LayerInfo[] layerInfoArr = new LayerInfo[netDepth];

    // Scanning over nodes can start at inputAndBiasCount instead of zero,
    // because we know that all nodes prior to that index are at depth zero.
    int nodeIdx = inputAndBiasCount;
    int connIdx = 0;
    for (int currDepth = 0; currDepth < netDepth; currDepth++)
    {
        // Scan for last node at the current depth (nodeInfoByDepth is sorted by depth,
        // so nodes of one depth form a contiguous run).
        for (; nodeIdx < nodeCount && nodeInfoByDepth[nodeIdx]._nodeDepth == currDepth; nodeIdx++)
        {
            ;
        }

        // Scan for last connection at the current depth (fastConnectionArray is sorted
        // by source node index, and source indexes are themselves depth-ordered).
        for (; connIdx < fastConnectionArray.Length && nodeInfoByDepth[fastConnectionArray[connIdx]._srcNeuronIdx]._nodeDepth == currDepth; connIdx++)
        {
            ;
        }

        // Store node and connection end indexes for the layer.
        layerInfoArr[currDepth]._endNodeIdx = nodeIdx;
        layerInfoArr[currDepth]._endConnectionIdx = connIdx;
    }

    return(new FastAcyclicNetwork(nodeActivationFnArr, nodeAuxArgsArray, fastConnectionArray, layerInfoArr, outputNeuronIdxArr, nodeCount, networkDef.InputNodeCount, networkDef.OutputNodeCount));
}