// Stores a node and connection index that represent a layer within the network (the nodes and connections at a given depth in the network).
        /// <summary>
        ///     Calculates the output error for each node in the target layer and all hidden layers. This overload is a
        ///     thin wrapper: it copies the target signal array into a plain double array and delegates to the overload
        ///     that operates on raw arrays.
        /// </summary>
        /// <param name="layers">The discrete layers in the ANN.</param>
        /// <param name="connections">Array of all connections in the ANN.</param>
        /// <param name="nodeActivationValues">The neuron activation values resulting from the last forward pass.</param>
        /// <param name="outputValues">The network output signal array (must be a MappingSignalArray at runtime).</param>
        /// <param name="targetValues">The target values against which the network is being trained.</param>
        /// <param name="nodeActivationFunctions">The activation function for each neuron (this will only differ with HyperNEAT).</param>
        /// <returns>The errors for each output and hidden neuron.</returns>
        public static double[] CalculateErrorSignals(LayerInfo[] layers, FastConnection[] connections,
            double[] nodeActivationValues, ISignalArray outputValues, ISignalArray targetValues, IActivationFunction[] nodeActivationFunctions)
        {
            // Materialize the target signals as a raw double array for the core implementation.
            var targets = new double[targetValues.Length];
            targetValues.CopyTo(targets, 0);

            // NOTE(review): the downcast assumes callers always supply a MappingSignalArray — confirm at call sites.
            return CalculateErrorSignals(layers, connections, nodeActivationValues,
                (MappingSignalArray) outputValues, targets, nodeActivationFunctions);
        }
        /// <summary>
        ///     Updates weights based on node error calculations using a given learning rate (momentum isn't taken into
        ///     consideration here).
        /// </summary>
        /// <param name="layers">The discrete layers in the ANN.</param>
        /// <param name="connections">Array of all connections in the ANN.</param>
        /// <param name="learningRate">The learning rate for all connections.</param>
        /// <param name="signalErrors">The errors for each output and hidden neuron.</param>
        /// <param name="nodeActivationValues">The neuron activation values resulting from the last forward pass.</param>
        /// <param name="outputArray">The mapped output signal array (currently unused; retained for interface compatibility).</param>
        public static void BackpropagateError(LayerInfo[] layers, FastConnection[] connections, double learningRate,
            double[] signalErrors, double[] nodeActivationValues, MappingSignalArray outputArray)
        {
            int conIdx = 0;

            // Iterate through every layer in a forward pass, calculating the new weights on each connection.
            // Note: an earlier revision special-cased the output layer (layerIdx == layers.Length - 1), but both
            // branches had become identical — signalErrors is indexed directly by target neuron index for all
            // layers — so the loop body is unified here.
            for (int layerIdx = 1; layerIdx < layers.Length; layerIdx++)
            {
                // Start at one layer below the current layer so we can access the source nodes.
                LayerInfo layerInfo = layers[layerIdx - 1];

                // Calculate the new weight for every connection in the current layer up to the last (i.e. "end")
                // connection by adding to its current weight the product of the learning rate, target neuron error,
                // and source neuron output.
                for (; conIdx < layerInfo._endConnectionIdx; conIdx++)
                {
                    connections[conIdx]._weight += learningRate *
                                                   signalErrors[connections[conIdx]._tgtNeuronIdx] *
                                                   nodeActivationValues[connections[conIdx]._srcNeuronIdx];
                }
            }
        }
        /// <summary>
        ///     Calculates the output error for each node in the target layer and all hidden layers.
        /// </summary>
        /// <param name="layers">The discrete layers in the ANN.</param>
        /// <param name="connections">Array of all connections in the ANN.</param>
        /// <param name="nodeActivationValues">The neuron activation values resulting from the last forward pass.</param>
        /// <param name="outputValues">The mapped output signal array; its _map gives each output's node index.</param>
        /// <param name="targetValues">The target values against which the network is being trained.</param>
        /// <param name="nodeActivationFunctions">The activation function for each neuron (this will only differ with HyperNEAT).</param>
        /// <returns>The errors for each output and hidden neuron.</returns>
        public static double[] CalculateErrorSignals(LayerInfo[] layers, FastConnection[] connections,
            double[] nodeActivationValues, MappingSignalArray outputValues, double[] targetValues, IActivationFunction[] nodeActivationFunctions)
        {
            double[] signalErrors = new double[nodeActivationValues.Length];

            // Precompute the inverse of outputValues._map (node index -> target value index) in one pass.
            // The previous revision linearly scanned _map for every output node, which is O(outputs^2).
            // Nodes with no map entry default to target index 0, matching the original fallback behavior.
            // Assumes _map entries are unique node indices — TODO confirm with MappingSignalArray's contract.
            int[] targetIdxByNode = new int[nodeActivationValues.Length];
            for (int i = 0; i < targetValues.Length; i++)
            {
                targetIdxByNode[outputValues._map[i]] = i;
            }

            // Get the last connection
            int conIdx = connections.Length - 1;

            // Get the last of the output nodes
            int nodeIdx = nodeActivationValues.Length - 1;

            // Iterate through the layers in reverse, calculating the signal errors
            for (int layerIdx = layers.Length - 1; layerIdx > 0; layerIdx--)
            {
                // Handle the output layer as a special case, calculating the error against the given target
                if (layerIdx == layers.Length - 1)
                {
                    // Output-layer error: (target - output) * f'(output) for every output node.
                    for (; nodeIdx >= layers[layerIdx - 1]._endNodeIdx; nodeIdx--)
                    {
                        signalErrors[nodeIdx] =
                            (targetValues[targetIdxByNode[nodeIdx]] -
                             nodeActivationValues[nodeIdx]) *
                            nodeActivationFunctions[nodeIdx].CalculateDerivative(
                                nodeActivationValues[nodeIdx]);
                    }
                }

                // Otherwise, we're on a hidden layer, so just compute the error with respect to the target
                // node's error in the layer above
                else
                {
                    // Calculate the error for each hidden node with respect to the error of the
                    // target node(s) of the next layer
                    for (; nodeIdx >= layers[layerIdx - 1]._endNodeIdx; nodeIdx--)
                    {
                        double deltas = 0;

                        // Sum the products of each outgoing connection's weight and its target node's error.
                        // Connections are walked backwards; the conIdx >= 0 guard prevents the index from
                        // running past the start of the array (the unguarded loop could throw
                        // IndexOutOfRangeException once all connections were consumed).
                        while (conIdx >= 0 && connections[conIdx]._srcNeuronIdx == nodeIdx)
                        {
                            deltas += connections[conIdx]._weight *
                                      signalErrors[connections[conIdx]._tgtNeuronIdx];
                            conIdx--;
                        }

                        // The hidden node's error is the weighted error sum scaled by the derivative of its
                        // activation function evaluated at its output.
                        signalErrors[nodeIdx] = deltas *
                                                nodeActivationFunctions[nodeIdx].CalculateDerivative(
                                                    nodeActivationValues[nodeIdx]);
                    }
                }
            }

            return signalErrors;
        }
        /// <summary>
        /// Construct a FastAcyclicNetwork with provided network definition data structures.
        /// </summary>
        /// <param name="nodeActivationFnArr">Array of neuron activation functions.</param>
        /// <param name="nodeAuxArgsArr">Array of neuron activation function arguments.</param>
        /// <param name="connectionArr">Array of connections.</param>
        /// <param name="layerInfoArr">Array of layer information.</param>
        /// <param name="outputNodeIdxArr">An array that specifies the index of each output neuron within _activationArr.
        /// This is necessary because the neurons have been sorted by their depth in the network structure and are therefore
        /// no longer in their original positions. Note however that the bias and input neurons *are* in their original
        /// positions as they are defined as being at depth zero.</param>
        /// <param name="nodeCount">Number of nodes in the network.</param>
        /// <param name="inputNodeCount">Number of input nodes in the network.</param>
        /// <param name="outputNodeCount">Number of output nodes in the network.</param>
        public FastAcyclicNetwork(IActivationFunction[] nodeActivationFnArr,
                                  double[][] nodeAuxArgsArr,
                                  FastConnection[] connectionArr,
                                  LayerInfo[] layerInfoArr,
                                  int[] outputNodeIdxArr,
                                  int nodeCount,
                                  int inputNodeCount,
                                  int outputNodeCount)
        {
            // Keep references to the network structure data.
            _nodeActivationFnArr = nodeActivationFnArr;
            _nodeAuxArgsArr = nodeAuxArgsArr;
            _layerInfoArr = layerInfoArr;
            _connectionArr = connectionArr;

            // Node counts used during activation; the bias neuron accounts for the +1.
            _inputNodeCount = inputNodeCount;
            _inputAndBiasNodeCount = inputNodeCount + 1;
            _outputNodeCount = outputNodeCount;

            // Working storage for node activation signals. Slot 0 holds the bias neuron's fixed output.
            _activationArr = new double[nodeCount];
            _activationArr[0] = 1.0;

            // Expose the input nodes as a sub-range of _activationArr. The offset of 1 skips the bias
            // neuron (the value at index 1 is the first black box input).
            _inputSignalArrayWrapper = new SignalArray(_activationArr, 1, inputNodeCount);

            // Expose the output nodes via outputNodeIdxArr. Because nodes are sorted by depth, the output
            // nodes are no longer guaranteed to occupy a contiguous segment at a fixed location; the
            // MappingSignalArray packages the activation array with the index map to abstract away that
            // level of indirection.
            _outputSignalArrayWrapper = new MappingSignalArray(_activationArr, outputNodeIdxArr);
        }