 /// <summary>
 /// Updates the sigma (error) terms for every neuron by backpropagation, working from
 /// the output layer back towards the input layer using the current error, the next
 /// layer's weights and the expected outputs.
 /// Makes use of the sigmoid function and the derivative of the sigmoid function.
 /// </summary>
 /// <param name="desiredOutput">The expected output value for each neuron in the output layer.</param>
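 /// <remarks>
 /// A sketch of the formulas the loops below implement (assuming a neuron's Output
 /// field holds its pre-activation weighted sum, to which Sigmoid and Sigmoid_D are applied):
 ///   output layer: sigma_j = (Sigmoid(net_j) - desiredOutput[j]) * Sigmoid_D(net_j)
 ///   hidden layer: sigma_j = Sigmoid_D(net_j) * sum over k of (w_kj * sigma_k)
 /// where w_kj is the weight in the next layer's neuron k that multiplies this neuron's activation.
 /// </remarks>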
 void Adjust_Sigmas(double[] desiredOutput)
 {
     Sigmas = new List<double[]>();
     for (int i = 0; i < Layers.Count; i++)
     {
         Sigmas.Add(new double[Layers[i].PerceptronCount]);
     }
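     // Walk backwards from the output layer so each layer's sigmas can reuse
     // the sigmas already computed for the layer that follows it.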
     for (int i = Layers.Count - 1; i >= 0; i--)
     {
         for (int j = 0; j < Layers[i].PerceptronCount; j++)
         {
             if (i == Layers.Count - 1)
             {
                 double y = Layers[i].Layer_Neurons[j].Output;
                 Sigmas[i][j] = (Perceptron.Sigmoid(y) - desiredOutput[j]) * Perceptron.Sigmoid_D(y);
             }
             else
             {
                 double sum = 0;
                 for (int k = 0; k < Layers[i + 1].PerceptronCount; k++)
                 {
                     sum += Layers[i + 1].Layer_Neurons[k].Weights[j] * Sigmas[i + 1][k];
                 }
                 Sigmas[i][j] = Perceptron.Sigmoid_D(Layers[i].Layer_Neurons[j].Output) * sum;
             }
         }
     }
 }
 /// <summary>
 /// Accumulates the delta (gradient) value for every weight from the sigma values
 /// and the activations of the previous layer.
 /// </summary>
 private void Update_Delta()
 {
     for (int i = 1; i < Layers.Count; i++)
     {
         for (int j = 0; j < Layers[i].PerceptronCount; j++)
         {
             for (int k = 0; k < Layers[i].Layer_Neurons[j].Weights.Length; k++)
             {
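                 // Accumulate the gradient contribution for the weight connecting
                 // neuron k of the previous layer to neuron j of this layer:
                 // sigma_j multiplied by the previous neuron's activation. Using +=
                 // means contributions keep summing until the deltas are applied
                 // and reset elsewhere.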
                 Delta[i][j, k] += Sigmas[i][j] * Perceptron.Sigmoid(Layers[i - 1].Layer_Neurons[k].Output);
             }
         }
     }
 }
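 /// <summary>
 /// A minimal sketch, not part of the original class, of how the accumulated deltas
 /// could be applied to the weights after Adjust_Sigmas and Update_Delta have been
 /// run for one or more samples. The method name and the learningRate parameter are
 /// assumptions for illustration only.
 /// </summary>
 private void Apply_Deltas_Sketch(double learningRate)
 {
     for (int i = 1; i < Layers.Count; i++)
     {
         for (int j = 0; j < Layers[i].PerceptronCount; j++)
         {
             for (int k = 0; k < Layers[i].Layer_Neurons[j].Weights.Length; k++)
             {
                 // Plain gradient-descent step: move each weight against its
                 // accumulated gradient, then clear the accumulator.
                 Layers[i].Layer_Neurons[j].Weights[k] -= learningRate * Delta[i][j, k];
                 Delta[i][j, k] = 0;
             }
         }
     }
 }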