/// <summary>
        /// Calculates the error signal on each individual unit within the network's hidden layers.
        ///
        /// Uses the gradient descent method to search the error surface.
        /// </summary>
        /// <param name="hidErr">filled with the calculated hidden unit errors, one list per
        /// hidden layer, stored in reverse order (last hidden layer first)</param>
        /// <param name="outErr">the output unit errors</param>
        /// <param name="nNet">the network undergoing training</param>
        ///
        private void CalcHiddenError(List <List <double> > hidErr,
                                     List <double> outErr, NeuralNet nNet)
        {
            int     nHidden = nNet.NumLayers;
            double  slope = 1, amplify = 1;
            ActiveT unitType = ActiveT.kUnipolar;

            // initialise the previous layer error with the output layer errors
            List <double> prevErr = new List <double>();

            prevErr.AddRange(outErr);

            // start with the last hidden layer and work back to the first
            for (int i = nHidden; i >= 1; i--)
            {
                List <double> unitInputs = new List <double>();
                List <double> layerErr   = new List <double>();

                // get the weighted connections for the current hidden layer
                NNetWeightedConnect wtConnect = nNet.GetWeightedConnect(i);

                int nUnits   = wtConnect.NumInputNodes;
                int nConnect = wtConnect.NumOutputNodes;

                // get the hidden layer activation unit details
                nNet.GetLayerDetails(i - 1, ref unitType, ref slope, ref amplify);

                // get the hidden layer activation unit input values
                nNet.GetUnitInputs(unitInputs, i - 1);

                // calculate the hidden layer errors
                for (int j = 0; j < nUnits; j++)
                {
                    double xj = unitInputs[j];

                    // the activation gradient depends only on the unit input,
                    // not on the forward connection, so hoist it out of the
                    // inner loop instead of recomputing it nConnect times
                    double gradient = GetGradient(unitType, slope, amplify, xj);
                    double error    = 0;

                    for (int k = 0; k < nConnect; k++)
                    {
                        List <double> weights = new List <double>();
                        wtConnect.GetWeightVector(k, weights);

                        // follow the steepest path on the error function by moving along the gradient
                        // of the hidden layer units activation function - the gradient descent method
                        error += gradient * prevErr[k] * weights[j];
                    }

                    layerErr.Add(error);
                }

                // update the hidden errors with the current layer error
                // N.B. Since we start from the last hidden layer the
                // hidden layer error signals are stored in reverse order
                hidErr.Add(layerErr);

                // back propagate the layer errors
                // BUG FIX: do NOT call prevErr.Clear() before re-assigning the
                // reference - on the second and subsequent iterations prevErr
                // aliases the layer error list already stored in hidErr (lists
                // are reference types), so clearing it would wipe out the
                // error signals saved on the previous pass
                prevErr = layerErr;
            }
        }
        /// <summary>
        /// Calculates the weight adjustments for the connections into the output layer.
        /// </summary>
        /// <param name="outErr">the output unit errors</param>
        /// <param name="nNet">the network undergoing training</param>
        ///
        private void CalcOutputWtAdjust(List <double> outErr, NeuralNet nNet)
        {
            int           numLayers   = nNet.NumLayers;
            int           momentumIdx = 0;
            List <double> inputs      = new List <double>();

            // the weighted connections linking the last hidden layer to the output layer
            NNetWeightedConnect connect = nNet.GetWeightedConnect(numLayers);

            // the activation values feeding those connections
            nNet.GetActivations(inputs, numLayers - 1);

            // adjust the weight vector of each output unit in turn
            for (int unit = 0; unit < connect.NumOutputNodes; unit++)
            {
                double        unitErr = outErr[unit];
                List <double> wtVec   = new List <double>();

                // fetch the current weights for this output unit
                connect.GetWeightVector(unit, wtVec);

                for (int wt = 0; wt < inputs.Count; wt++)
                {
                    // delta rule: learning rate * unit error * input activation
                    double delta = mLearnConst * unitErr * inputs[wt];

                    // with momentum enabled, blend in a fraction of the previous
                    // adjustment and remember the blended value for next time
                    if (mMomentum > 0)
                    {
                        if (momentumIdx < mPrevOutWt.Count)
                        {
                            delta += mMomentum * mPrevOutWt[momentumIdx];
                            mPrevOutWt[momentumIdx] = delta;
                        }
                        else
                        {
                            // first pass - seed the stored adjustment
                            mPrevOutWt.Add(delta);
                        }
                    }

                    // apply the total adjustment to this weight
                    wtVec[wt] += delta;
                    momentumIdx++;
                }

                connect.SetWeightVector(unit, wtVec);
            }

            nNet.SetWeightedConnect(connect, numLayers);
        }
        /// <summary>
        /// Calculates the weight adjustments for the connections into the hidden layers.
        /// </summary>
        /// <param name="hidErrSig">the hidden unit errors (stored in reverse layer order)</param>
        /// <param name="inputVec">the current training set input values</param>
        /// <param name="nNet">the network undergoing training</param>
        ///
        private void CalcHiddenWtAdjust(List <List <double> > hidErrSig,
                                        List <double> inputVec, NeuralNet nNet)
        {
            int           lastHidIdx  = nNet.NumLayers - 1;
            int           momentumIdx = 0;
            List <double> inputs      = new List <double>();

            // work backwards from the last hidden layer to the input layer
            for (int layer = lastHidIdx; layer >= 0; layer--)
            {
                // the weighted connections feeding the current layer
                NNetWeightedConnect connect = nNet.GetWeightedConnect(layer);

                // hidden error signals are stored in reverse layer order,
                // so index from the end of the list back towards the start
                List <double> errSig = hidErrSig[lastHidIdx - layer];

                if (layer == 0)
                {
                    // the network inputs feed the first set of connections
                    inputs = inputVec;
                }
                else
                {
                    // otherwise the previous hidden layer supplies the inputs
                    // NOTE(review): the same list instance is passed on every
                    // iteration - this assumes GetActivations resets its
                    // contents; confirm against the NeuralNet implementation
                    nNet.GetActivations(inputs, layer - 1);
                }

                // adjust the weight vector of each connection output unit in turn
                for (int unit = 0; unit < connect.NumOutputNodes; unit++)
                {
                    double        unitErr = errSig[unit];
                    List <double> wtVec   = new List <double>();

                    // fetch the current weights for this unit
                    connect.GetWeightVector(unit, wtVec);

                    for (int wt = 0; wt < inputs.Count; wt++)
                    {
                        // delta rule: learning rate * unit error * input activation
                        double delta = mLearnConst * unitErr * inputs[wt];

                        // with momentum enabled, blend in a fraction of the previous
                        // adjustment and remember the blended value for next time
                        if (mMomentum > 0)
                        {
                            if (momentumIdx < mPrevHidWt.Count)
                            {
                                delta += mMomentum * mPrevHidWt[momentumIdx];
                                mPrevHidWt[momentumIdx] = delta;
                            }
                            else
                            {
                                // first pass - seed the stored adjustment
                                mPrevHidWt.Add(delta);
                            }
                        }

                        // apply the total adjustment to this weight
                        wtVec[wt] += delta;
                        momentumIdx++;
                    }

                    connect.SetWeightVector(unit, wtVec);
                }

                nNet.SetWeightedConnect(connect, layer);
            }
        }
        /////////////////////////////////////////////////////////////////////
        // Private Methods
        /////////////////////////////////////////////////////////////////////

        /// <summary>
        /// Generates a string representation of this network.
        ///
        /// Format: the main network details as space-separated values, followed
        /// by one 'L'-tagged record per connection layer containing the layer
        /// dimensions, the activation unit settings and the weight values.
        /// </summary>
        /// <returns>a string representation of this network</returns>
        ///
        private string Serialize()
        {
            List <double> weights = new List <double>();
            StringBuilder outStr  = new StringBuilder();

            // serialize the main details
            // NOTE(review): numeric ToString() here is culture-sensitive; the
            // deserializer must parse with the same culture or round-tripping
            // breaks across locales - confirm against the matching reader
            int outUnitType = (int)mOutUnitType;    // cast the output unit type enum to an int

            outStr.Append(mNumInputs.ToString());
            outStr.Append(" ");
            outStr.Append(mNumOutputs.ToString());
            outStr.Append(" ");
            outStr.Append(mNumLayers.ToString());
            outStr.Append(" ");
            outStr.Append(outUnitType.ToString());
            outStr.Append(" ");
            outStr.Append(mOutUnitSlope.ToString());
            outStr.Append(" ");
            outStr.Append(mOutUnitAmplify.ToString());
            outStr.Append(" ");

            // serialize the layer data
            for (int i = 0; i <= mNumLayers; i++)       // use <= to include the output layer
            {
                NNetWeightedConnect connect = mLayers[i];
                int    nIn   = connect.NumInputNodes;
                int    nOut  = connect.NumOutputNodes;
                int    nUnit = 0;
                double sUnit = 0.0, aUnit = 0.0;

                // the activation unit settings only apply to the hidden layers;
                // the output layer (i == mNumLayers) keeps the zero defaults
                if (i < mNumLayers)
                {
                    nUnit = (int)mActiveUnits[i];
                    sUnit = mActiveSlope[i];
                    aUnit = mActiveAmplify[i];
                }

                // the layer record header: tag, dimensions and unit settings
                outStr.Append("L ");
                outStr.Append(nIn.ToString());
                outStr.Append(" ");
                outStr.Append(nOut.ToString());
                outStr.Append(" ");
                outStr.Append(nUnit.ToString());
                outStr.Append(" ");
                outStr.Append(sUnit.ToString());
                outStr.Append(" ");
                outStr.Append(aUnit.ToString());
                outStr.Append(" ");

                // append every weight vector of this connection layer
                // NOTE(review): the weights list is reused across calls - this
                // assumes GetWeightVector resets its contents; verify against
                // NNetWeightedConnect
                for (int j = 0; j < nOut; j++)
                {
                    connect.GetWeightVector(j, weights);

                    for (int k = 0; k < nIn; k++)
                    {
                        // G16 preserves full double precision for round-tripping
                        outStr.Append(weights[k].ToString("G16"));
                        outStr.Append(" ");
                    }
                }
            }

            // terminate the output string
            outStr.AppendLine();

            return(outStr.ToString());
        }