        /// <summary>
        /// Get the error of the network for a single training example
        /// (half the squared difference, averaged over the output neurons)
        /// </summary>
        /// <param name="td">Training data providing the Input and the expected Output</param>
        /// <returns>Average error over the output neurons</returns>
        public double GetTotalError(TrainingData td)
        {
            double error = 0;

            //get network Output
            double[] output = CalculateOutput(td);
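            //Sum half the squared difference between the actual and expected value of each output neuron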
            for (int k = 0; k < output.Length; k++)
            {
                double diff = output[k] - td.Output[k];
                error += 0.5 * (diff * diff);
            }
            return error / output.Length;
        }
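        /// <summary>
        /// Train the network on a single example using backpropagation:
        /// run a forward pass, compute the deltas for every layer, then
        /// adjust the weights and biases by LRate * delta * input.
        /// </summary>
        /// <param name="td">Training data providing the Input and the expected Output</param>
        /// <example>
        /// Sketch of a typical training loop (net and trainingSet are assumed to already exist;
        /// how the TrainingData items are built is not shown here):
        /// <code>
        /// foreach (TrainingData sample in trainingSet)
        ///     net.Train(sample);
        /// double error = net.GetTotalError(trainingSet[0]);
        /// </code>
        /// </example>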
        public void Train(TrainingData td)
        {
            //Calculate Output to fill layer outputs
            CalculateOutput(td);

            //Calculate deltas
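            //Output layer deltas come straight from the expected Output in the training data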
            for (int j = 0; j < Layers[LastLayer].Neurons.Length; j++)
            {
                Layers[LastLayer].Neurons[j].Delta = GetOutputNeuronDelta(Layers[LastLayer].Output[j], td.Output[j]);
            }
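            //Hidden layer deltas are propagated backwards, each one using the deltas of the layer above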
            for (int i = LastHiddenLayer; i >= 0; i--)
            {
                for (int j = 0; j < Layers[i].Neurons.Length; j++)
                {
                    Layers[i].Neurons[j].Delta = GetHiddenNeuronDelta(j, Layers[i].Output[j], Layers[i + 1]);
                }
            }
            //Adjust Weights
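            //Each weight moves by LRate * delta * (the Output feeding into that weight); biases move by LRate * delta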
            for (int i = LastLayer; i > 0; i--)
            {
                //Output and middle layers use the previous layer's Output
                Layer l = Layers[i];
                for (int j = 0; j < l.Neurons.Length; j++)
                {
                    double lrateDt = LRate * l.Neurons[j].Delta;
                    l.Neurons[j].Bias += lrateDt;
                    for (int k = 0; k < l.Neurons[j].Weights.Length; k++)
                    {
                        l.Neurons[j].Weights[k] += lrateDt * Layers[i - 1].Output[k];
                    }
                }
            }
            //The first layer uses the Input from the training data, so handle it separately
            //to avoid an extra if-check in the inner loops that would slow things down
            for (int j = 0; j < Layers[0].Neurons.Length; j++)
            {
                double lrateDt = LRate * Layers[0].Neurons[j].Delta;
                Layers[0].Neurons[j].Bias += lrateDt;
                for (int k = 0; k < Layers[0].Neurons[j].Weights.Length; k++)
                {
                    Layers[0].Neurons[j].Weights[k] += lrateDt * td.Input[k];
                }
            }
        }
        /// <summary>
        /// Calculate Output of the network for a given Input
        /// </summary>
        /// <param name="td">Training data object to get Input from</param>
        /// <returns>Output of the Neural Network</returns>
        public double[] CalculateOutput(TrainingData td)
        {
            return CalculateOutput(td.Input);
        }