MappingSignalArray wraps a native array together with an indirection/mapping array. The wrapper provides indexed access to the underlying native array through that mapping, i.e. one level of indirection. See SignalArray for more info.
Inheritance: ISignalArray
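For intuition, a simplified stand-in for the mapped indexer is sketched below: element i of the wrapper resolves to element _map[i] of the wrapped array. The class is illustrative only; the field names (_wrappedArray, _map) merely mirror those referenced in the listings further down and this is not the actual MappingSignalArray implementation.

    // Illustrative sketch only: redirects indexed reads/writes through a mapping array,
    // so sketch[i] resolves to _wrappedArray[_map[i]]. Not the real MappingSignalArray.
    public class MappedArraySketch
    {
        public readonly double[] _wrappedArray;
        public readonly int[] _map;

        public MappedArraySketch(double[] wrappedArray, int[] map)
        {
            _wrappedArray = wrappedArray;
            _map = map;
        }

        // Indexed access goes through one level of indirection via the mapping array.
        public double this[int index]
        {
            get { return _wrappedArray[_map[index]]; }
            set { _wrappedArray[_map[index]] = value; }
        }

        public int Length
        {
            get { return _map.Length; }
        }
    }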
        /// <summary>
        ///     Updates weights based on node error calculations using a given learning rate (momentum isn't taken into
        ///     consideration here).
        /// </summary>
        /// <param name="layers">The discrete layers in the ANN.</param>
        /// <param name="connections">Array of all connections in the ANN.</param>
        /// <param name="learningRate">The learning rate for all connections.</param>
        /// <param name="signalErrors">The errors for each output and hidden neuron.</param>
        /// <param name="nodeActivationValues">The activation function for each neuron (this will only differ with HyperNEAT).</param>
        public static void BackpropagateError(LayerInfo[] layers, FastConnection[] connections, double learningRate,
            double[] signalErrors, double[] nodeActivationValues, MappingSignalArray outputArray)
        {
            int conIdx = 0;

            // Iterate through every layer in a forward pass, calculating the new weights on each connection
            for (int layerIdx = 1; layerIdx < layers.Length; layerIdx++)
            {
                // Start at one layer below the current layer so we can access the source nodes
                LayerInfo layerInfo = layers[layerIdx - 1];
                // Handle the connections feeding the output layer as a special case
                if (layerIdx == layers.Length - 1)
                {
                    // Calculate the new weight for every connection in the current layer up to the last (i.e. "end")
                    // connection by adding its current weight to the product of the learning rate, target neuron error,
                    // and source neuron output
                    for (; conIdx < layerInfo._endConnectionIdx; conIdx++)
                    {
                        //double sigError = signalErrors[outputArray._map[connections[conIdx]._tgtNeuronIdx]];
                        double sigError = signalErrors[connections[conIdx]._tgtNeuronIdx];
                        connections[conIdx]._weight = connections[conIdx]._weight +
                                                      learningRate * sigError *
                                                      nodeActivationValues[connections[conIdx]._srcNeuronIdx];
                    }
                }
                else
                {
                    // Calculate the new weight for every connection in the current layer up to the last (i.e. "end")
                    // connection by adding its current weight to the product of the learning rate, target neuron error,
                    // and source neuron output
                    for (; conIdx < layerInfo._endConnectionIdx; conIdx++)
                    {
                        connections[conIdx]._weight = connections[conIdx]._weight +
                                                      learningRate * signalErrors[connections[conIdx]._tgtNeuronIdx] *
                                                      nodeActivationValues[connections[conIdx]._srcNeuronIdx];
                    }
                }
            }
        }
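The per-connection rule applied in both branches above is a plain gradient-descent step (no momentum): the weight is increased by the learning rate times the target neuron's error signal times the source neuron's activation. A minimal standalone sketch of that single update, using the same field names as the listing (FastConnection is assumed to be a struct here), is:

        // Minimal sketch of the weight update used above:
        // weight += learningRate * error(target neuron) * activation(source neuron).
        // Field names mirror the listing; the ref/struct assumption is illustrative.
        private static void UpdateConnectionWeight(ref FastConnection connection, double learningRate,
            double[] signalErrors, double[] nodeActivationValues)
        {
            connection._weight += learningRate
                                  * signalErrors[connection._tgtNeuronIdx]
                                  * nodeActivationValues[connection._srcNeuronIdx];
        }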
        /// <summary>
        ///     Calculates the error signal for each node in the output layer and all hidden layers.
        /// </summary>
        /// <param name="layers">The discrete layers in the ANN.</param>
        /// <param name="connections">Array of all connections in the ANN.</param>
        /// <param name="nodeActivationValues">The neuron activation values resulting from the last forward pass.</param>
        /// <param name="targetValues">The target values against which the network is being trained.</param>
        /// <param name="nodeActivationFunctions">The activation function for each neuron (this will only differ with HyperNEAT).</param>
        /// <returns>The errors for each output and hidden neuron.</returns>
        public static double[] CalculateErrorSignals(LayerInfo[] layers, FastConnection[] connections,
            double[] nodeActivationValues, MappingSignalArray outputValues, double[] targetValues, IActivationFunction[] nodeActivationFunctions)
        {
            double[] signalErrors = new double[nodeActivationValues.Length];

            // Get the last connection
            int conIdx = connections.Length - 1;

            // Get the last of the output nodes
            int nodeIdx = nodeActivationValues.Length - 1;

            // Iterate through the layers in reverse, calculating the signal errors
            for (int layerIdx = layers.Length - 1; layerIdx > 0; layerIdx--)
            {
                // Handle the output layer as a special case, calculating the error against the given target
                if (layerIdx == layers.Length - 1)
                {
                    // Calculate the error for every output node with respect to its corresponding target value
                    for (; nodeIdx >= layers[layerIdx - 1]._endNodeIdx; nodeIdx--)
                    {

                        // Find the index in the target values array that corresponds to this
                        // output node by searching the output mapping array
                        //int targetValuesID = outputValues._map[nodeIdx - outputValues.Length - 1] - outputValues.Length - 1;
                        int targetValuesID = 0;
                        for (int i = 0; i < targetValues.Length; i++)
                        {
                            if (outputValues._map[i] == nodeIdx)
                            {
                                targetValuesID = i;
                                break;
                            }
                        }
                        signalErrors[nodeIdx] =
                            (targetValues[targetValuesID] -
                             nodeActivationValues[nodeIdx])*
                            nodeActivationFunctions[nodeIdx].CalculateDerivative(
                                nodeActivationValues[nodeIdx]);
                        /*
                        int targetValuesID = (targetValues.Length - 1) - ((nodeActivationValues.Length - 1) - nodeIdx);
                            signalErrors[nodeIdx] =
                                (targetValues[targetValuesID] -
                                 outputValues[nodeIdx - layers[layerIdx - 1]._endNodeIdx])*
                                nodeActivationFunctions[nodeIdx].CalculateDerivative(
                                    outputValues[nodeIdx - layers[layerIdx - 1]._endNodeIdx]);
                        */
                    }
                }

                // Otherwise, we're on a hidden layer, so compute the error from the errors of the
                // target nodes this node feeds into in the next layer (toward the output)
                else
                {
                    // Calculate the error for each hidden node with respect to the error of the
                    // target node(s) of the next layer
                    for (; nodeIdx >= layers[layerIdx - 1]._endNodeIdx; nodeIdx--)
                    {
                        double deltas = 0;

                        // Calculate the sum of the products of the target node error and connection weight
                        while (conIdx > -1 && connections[conIdx]._srcNeuronIdx == nodeIdx)
                        {
                            deltas += connections[conIdx]._weight*
                                      signalErrors[connections[conIdx]._tgtNeuronIdx];
                            conIdx--;
                        }

                        // The error signal for the hidden node is then the weighted sum of downstream
                        // errors multiplied by the derivative of the activation function at the node's activation value
                        signalErrors[nodeIdx] = deltas*
                                                nodeActivationFunctions[nodeIdx].CalculateDerivative(
                                                    nodeActivationValues[nodeIdx]);
                    }
                }
            }

            return signalErrors;
        }
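A rough sketch of how these two routines might be wired together into a single training step is shown below. How the layer structure, connection array, activation values, and mapped output array are produced by the forward pass is assumed here rather than taken from the listings above.

        // Hypothetical single training step (illustrative only): compute error signals
        // from a completed forward pass, then apply the gradient-descent weight updates.
        public static void TrainStep(LayerInfo[] layers, FastConnection[] connections,
            double[] nodeActivationValues, MappingSignalArray outputArray,
            IActivationFunction[] nodeActivationFunctions, double[] targetValues, double learningRate)
        {
            // Backward pass: error signal for every output and hidden neuron
            double[] signalErrors = CalculateErrorSignals(layers, connections, nodeActivationValues,
                outputArray, targetValues, nodeActivationFunctions);

            // Weight update: one gradient-descent step on every connection
            BackpropagateError(layers, connections, learningRate, signalErrors,
                nodeActivationValues, outputArray);
        }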