Example #1
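Train runs per-example gradient descent with momentum: each epoch it forward-propagates every training example, records the mean squared error, backpropagates the output-layer error δL through the hidden layers, and updates every layer's weights. The average error per epoch is appended to Errors, and training stops early once it falls to desiredError.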
        public void Train(double learningRate, int epochs, double momentum, List<TrainingElement> inputs, double desiredError = 0)
        {
            //start from a clean slate: clear layer state and previously recorded errors
            ResetLayers();
            Errors.Clear();
            for (var i = 0; i < epochs; i++)
            {
                var epochErrors = new List<double>();  //per-example errors for this epoch
                for (var j = 0; j < inputs.Count; j++)
                {
                    //forward pass: run this example through the network
                    var guess = ForwardPropagation(inputs[j].Input);

                    epochErrors.Add(MeanSquaredError(guess, inputs[j].DesiredOutput));

                    //BP1: an equation for the error in the output layer, δL = ∇aC ⊙ σ′(zL)
                    var outputLayer       = Layers.Last();
                    var sigmoidDerivative =
                        outputLayer.ActivationFunction.CalculateDifferential(outputLayer.WeightedSum);                  //σ′(zL)
                    outputLayer.DeltaL = guess.Subtract(inputs[j].DesiredOutput).PointwiseMultiply(sigmoidDerivative);  //∇aC = (aL − y), so δL = (aL − y) ⊙ σ′(zL)

                    //BP2: an equation for the error δl in terms of the error in the next layer, δl+1;
                    //propagate backwards from the last hidden layer to the first
                    for (var k = Layers.Count - 2; k >= 0; k--)
                    {
                        Layers[k].Backpropagate(Layers[k + 1]);
                    }

                    //weight update: the first layer is fed the raw input, each later layer the previous layer's activation
                    Layers.First().UpdateLayer(inputs[j].Input, learningRate, momentum);

                    for (var k = 1; k < Layers.Count; k++)
                    {
                        Layers[k].UpdateLayer(Layers[k - 1].Activation, learningRate, momentum);
                    }
                }
                Errors.Add(epochErrors.Sum() / inputs.Count);  //mean error over this epoch
                //gather statistics before the early-stop check so the final epoch is recorded too
                Gatherer.GatherStatistics(this);
                if (Errors[i] <= desiredError)
                {
                    break;
                }
            }
        }
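
A minimal calling sketch follows. The vector type is an assumption inferred from the Subtract/PointwiseMultiply calls above, which match Math.NET Numerics' Vector<double>; TrainingElement is assumed to expose settable Input/DesiredOutput properties, and network stands in for however the surrounding class is instantiated.

        using System.Collections.Generic;
        using MathNet.Numerics.LinearAlgebra;

        // Hypothetical driver (assumed types): train on XOR until the mean
        // epoch error drops below 0.001 or 10,000 epochs have elapsed.
        var v = Vector<double>.Build;
        var xor = new List<TrainingElement>
        {
            new TrainingElement { Input = v.Dense(new[] { 0.0, 0.0 }), DesiredOutput = v.Dense(new[] { 0.0 }) },
            new TrainingElement { Input = v.Dense(new[] { 0.0, 1.0 }), DesiredOutput = v.Dense(new[] { 1.0 }) },
            new TrainingElement { Input = v.Dense(new[] { 1.0, 0.0 }), DesiredOutput = v.Dense(new[] { 1.0 }) },
            new TrainingElement { Input = v.Dense(new[] { 1.0, 1.0 }), DesiredOutput = v.Dense(new[] { 0.0 }) },
        };
        network.Train(learningRate: 0.5, epochs: 10000, momentum: 0.9, inputs: xor, desiredError: 0.001);

Note that the default desiredError of 0 effectively disables early stopping, since the mean squared error rarely reaches exactly zero.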