/// <summary>
    /// Creates a 1D array of random weight and bias values, uniform in [lo, hi),
    /// and loads it into the network via SetWeights.
    /// A 1D array makes storing the weights easier.
    /// Order: ihWeights - hhWeights[] - hoWeights - hBiases[] - oBiases
    /// </summary>
    /// <param name="lo">Inclusive lower bound of the random range (default -0.75).</param>
    /// <param name="hi">Exclusive upper bound of the random range (default +0.75).</param>
    public void InitializeWeights(double lo = -0.75, double hi = +0.75)
    {
        // make wts
        int numWts = DeepNeuralNetwork.NumWeights(this.nInput, this.nHidden, this.nOutput);

        double[] wts = new double[numWts];
        for (int i = 0; i < numWts; ++i)
        {
            wts[i] = (hi - lo) * rnd.NextDouble() + lo;  // uniform in [lo, hi)
        }
        this.SetWeights(wts);
    }
    } // SetWeights

    /// <summary>
    /// Builds a new weight set by mixing the current network weights with the
    /// supplied weights (crossover) and randomly mutating individual entries,
    /// then writes the result directly into the network's weight/bias arrays.
    /// Order: ihWeights - hhWeights[] - hoWeights - hBiases[] - oBiases
    /// </summary>
    /// <param name="wts">Partner weight array; must hold exactly NumWeights values, otherwise the call is a no-op.</param>
    /// <param name="chance">Percent (0-100): chance of inheriting a position from wts when it is not mutated.</param>
    /// <param name="mutationRate">Percent (0-100): chance that a position is mutated.</param>
    public void updateWeights(double[] wts, double chance, double mutationRate)
    {
        int nw = DeepNeuralNetwork.NumWeights(this.nInput, this.nHidden, this.nOutput);  // total num wts + biases

        // Validate before doing any work. The original checked this only after the
        // mutation loop, which could throw IndexOutOfRangeException when
        // wts.Length > nw (newWeights has only nw slots).
        if (wts.Length != nw)
        {
            return;
        }

        double[] weights    = this.GetWeights();
        double[] newWeights = new double[nw];

        for (int i = 0; i < nw; i++)
        {
            if (rnd.Next(1, 101) <= mutationRate)
            {
                int x = rnd.Next(1, 101);
                // 20% chance for each of these cases to occur.
                // NOTE(review): newWeights[i] is still 0 here, so the +=/-= branches
                // assign +/- the random delta instead of perturbing an existing
                // weight — confirm this is the intended mutation semantics.
                if (x <= 20)
                {
                    newWeights[i] = 1.5 * rnd.NextDouble() - 0.75;     // fresh value in [-0.75, 0.75)
                }
                else if (x <= 40)
                {
                    newWeights[i] += 0.75 * rnd.NextDouble() - 0.375;  // small positive jitter
                }
                else if (x <= 60)
                {
                    newWeights[i] -= 0.75 * rnd.NextDouble() - 0.375;  // small negative jitter
                }
                else if (x <= 80)
                {
                    newWeights[i] += 1.5 * rnd.NextDouble() - 0.75;    // large positive jitter
                }
                else
                {
                    newWeights[i] -= 1.5 * rnd.NextDouble() - 0.75;    // large negative jitter
                }
            }
            else if (rnd.Next(1, 101) <= chance)
            {
                newWeights[i] = wts[i];      // inherit from partner
            }
            else
            {
                newWeights[i] = weights[i];  // keep current weight
            }
        }

        // order: ihWeights - hhWeights[] - hoWeights - hBiases[] - oBiases
        int ptr = 0;  // pointer into newWeights[]

        for (int i = 0; i < nInput; ++i)                // input node
        {
            for (int j = 0; j < hNodes[0].Length; ++j)  // 1st hidden layer nodes
            {
                ihWeights[i][j] = newWeights[ptr++];
            }
        }

        for (int h = 0; h < nLayers - 1; ++h)                // not last h layer
        {
            for (int j = 0; j < nHidden[h]; ++j)             // from node
            {
                for (int jj = 0; jj < nHidden[h + 1]; ++jj)  // to node
                {
                    hhWeights[h][j][jj] = newWeights[ptr++];
                }
            }
        }

        int lastHidden = this.nLayers - 1;  // if 3 hidden layers (0,1,2) last is 3-1 = [2]

        for (int j = 0; j < this.nHidden[lastHidden]; ++j)  // last hidden -> output weights
        {
            for (int k = 0; k < this.nOutput; ++k)
            {
                hoWeights[j][k] = newWeights[ptr++];
            }
        }

        for (int h = 0; h < nLayers; ++h)  // hidden node biases
        {
            for (int j = 0; j < this.nHidden[h]; ++j)
            {
                hBiases[h][j] = newWeights[ptr++];
            }
        }

        for (int k = 0; k < nOutput; ++k)  // output node biases
        {
            oBiases[k] = newWeights[ptr++];
        }
    } // updateWeights