        /// <summary>
        /// The Nguyen-Widrow initialization algorithm is the following:
        /// 
        /// 1. Initialize all hidden-layer weights with (ranged) random values.
        /// 2. For each hidden layer:
        /// 2.1 Calculate the beta value: 0.7 times the Nth root of the number
        /// of hidden neurons, where N is the number of input-layer neurons.
        /// 2.2 For each synapse:
        /// 2.2.1 For each weight, adjust the weight by dividing it by the norm
        /// of the weights for the neuron and multiplying it by the beta value.
        /// </summary>
        /// <param name="network">The network to randomize.</param>
        public override void Randomize(BasicNetwork network)
        {
            base.Randomize(network);
            int neuronCount = 0;

            foreach (ILayer layer in network.Structure.Layers)
            {
                neuronCount += layer.NeuronCount;
            }
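
            // neuronCount is now the total across all layers; the hidden
            // count is obtained below by subtracting the input and output
            // layer sizes.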

            ILayer inputLayer = network.GetLayer(BasicNetwork.TAG_INPUT);
            ILayer outputLayer = network.GetLayer(BasicNetwork.TAG_OUTPUT);

            if (inputLayer == null)
                throw new EncogError("Must have an input layer for Nguyen-Widrow.");

            if (outputLayer == null)
                throw new EncogError("Must have an output layer for Nguyen-Widrow.");

            int hiddenNeurons = neuronCount - inputLayer.NeuronCount
                    - outputLayer.NeuronCount;

            if (hiddenNeurons < 1)
                throw new EncogError("Must have hidden neurons for Nguyen-Widrow.");

            double beta = 0.7 * Math.Pow(hiddenNeurons,
                    1.0 / inputLayer.NeuronCount);
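
            // Illustrative arithmetic (hypothetical sizes): with 4 hidden
            // neurons and 2 input neurons, beta = 0.7 * 4^(1/2) = 1.4.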

            foreach (ISynapse synapse in network.Structure.Synapses)
            {
                Randomize(beta, synapse);
            }

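            // The object-level weights changed, so mark the flat network for
            // re-flattening and push the new weights into it.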
            network.Structure.FlatUpdate = FlatUpdateNeeded.Flatten;
            network.Structure.FlattenWeights();
        }
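
        // A minimal sketch of the per-synapse scaling step (2.2 in the summary
        // above). The actual Randomize(double, ISynapse) helper is not shown in
        // this excerpt; this hypothetical RandomizeSketch assumes ISynapse
        // exposes FromNeuronCount, ToNeuronCount, and a WeightMatrix indexed as
        // [fromNeuron, toNeuron].
        private static void RandomizeSketch(double beta, ISynapse synapse)
        {
            if (synapse.WeightMatrix == null)
                return;

            for (int toNeuron = 0; toNeuron < synapse.ToNeuronCount; toNeuron++)
            {
                // Euclidean norm of all weights feeding this target neuron.
                double norm = 0.0;
                for (int fromNeuron = 0; fromNeuron < synapse.FromNeuronCount; fromNeuron++)
                {
                    double weight = synapse.WeightMatrix[fromNeuron, toNeuron];
                    norm += weight * weight;
                }
                norm = Math.Sqrt(norm);

                if (norm > 0)
                {
                    // Rescale each weight so the column norm becomes beta.
                    for (int fromNeuron = 0; fromNeuron < synapse.FromNeuronCount; fromNeuron++)
                    {
                        synapse.WeightMatrix[fromNeuron, toNeuron] *= beta / norm;
                    }
                }
            }
        }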