Example #1
        /// <summary>
        /// Trains the network with the backpropagation learning algorithm.
        /// </summary>
        /// <param name="inputs">the input training vectors</param>
        /// <param name="expected_outputs">the corresponding expected output vectors</param>
        public override void Learn(float[][] inputs, float[][] expected_outputs)
        {
            base.Learn(inputs, expected_outputs);
            float[] NeuronOutput;
            float   MeanSquareError2;

            CurrentEpoch = 0;
            // for each epoch
            do
            {
                // reset the accumulated error for this epoch
                MeanSquareError = 0f;
                _error_vector   = new float[ANN.NumberOfOutputs];
                // run through the training set
                for (int i = 0; i < InputTrainingData.Length; i++)
                {
                    MeanSquareError2 = 0f;
                    NeuronOutput     = ANN.Output(inputs[i]);
                    for (int j = 0; j < NeuronOutput.Length; j++)
                    {
                        _error_vector[j] = OutputTrainingData[i][j] - NeuronOutput[j];
                        // accumulate the squared error for this output neuron
                        MeanSquareError2 += _error_vector[j] * _error_vector[j];
                    }
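                    // halve the summed squared error; the 1/2 factor cancels against the derivative during backpropagation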
                    MeanSquareError2 /= 2f;
                    MeanSquareError  += MeanSquareError2;
                    CalculateUsefulVariable(i);
                    // update the weights
                    SetWeight(i);
                }
                CurrentEpoch++;
                // signal that training is still in progress (keep-alive)
                Pulse();
                // custom convergence check; stop early if the criterion is met
                if (Convergence())
                {
                    break;
                }
            } while (CurrentEpoch < _maximum_of_epochs && MeanSquareError > _error_treshold);
        }
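
For reference, the per-sample error accumulated inside the loop is half the sum of squared differences between the expected and the actual outputs, and MeanSquareError is the sum of these values over the whole training set. The short, self-contained C# sketch below reproduces just that computation in isolation; the class and method names in it are illustrative and are not part of the trainer above.

using System;

static class SampleErrorDemo
{
    // Half the sum of squared differences between expected and actual outputs,
    // i.e. the per-sample error term the training loop adds to MeanSquareError.
    static float SampleError(float[] expected, float[] actual)
    {
        float sum = 0f;
        for (int j = 0; j < actual.Length; j++)
        {
            float e = expected[j] - actual[j];
            sum += e * e;
        }
        return sum / 2f;
    }

    static void Main()
    {
        // purely illustrative expected/actual output pair
        float[] expected = { 1f, 0f };
        float[] actual   = { 0.8f, 0.3f };
        Console.WriteLine(SampleError(expected, actual));   // prints approximately 0.065
    }
}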