Example #1
0
        /// <summary>
        ///   Calculates the error-gradient terms for every layer of the network,
        ///   starting at the output layer and propagating backwards, storing them
        ///   into the per-thread <c>_networkErrors</c> buffers.
        /// </summary>
        /// <param name="desiredOutput">Desired (target) output vector for the last layer.</param>
        /// <returns>The sum of squared errors over the output layer.</returns>
        protected override double CalculateError(double[] desiredOutput)
        {
            double sumOfSquaredErrors = 0.0;
            int    layersCount        = Layers.Length;

            double[][] networkErrors  = _networkErrors.Value;
            double[][] networkOutputs = _networkOutputs.Value;

            // Assume that all network neurons have the same activation function
            IActivationFunction function = Layers[0].Neurons[0].ActivationFunction;

            // 1. Calculate error values for last layer first.
            double[] layerOutputs = networkOutputs[layersCount - 1];
            double[] errors       = networkErrors[layersCount - 1];

            for (int i = 0; i < errors.Length; i++)
            {
                double output = layerOutputs[i];
                double e      = output - desiredOutput[i];

                // Chain rule at the output layer: dE/dnet = (output - desired) * f'(output)
                errors[i]           = e * function.Derivative2Function(output);
                sumOfSquaredErrors += e * e;
            }

            // 2. Calculate errors for all other layers
            for (int j = layersCount - 2; j >= 0; j--)
            {
                errors       = networkErrors[j];
                layerOutputs = networkOutputs[j];

                // Check the cast explicitly: the original dereferenced the result of
                // an unchecked 'as' cast inside the inner loop, so an unexpected
                // layer type surfaced as an opaque NullReferenceException.
                var layerNext = Layers[j + 1] as ActivationLayerRProp;
                if (layerNext == null)
                {
                    throw new System.InvalidOperationException(
                        "Layer " + (j + 1) + " is not an ActivationLayerRProp.");
                }

                double[] nextErrors = networkErrors[j + 1];

                // For all neurons of this layer
                for (int i = 0; i < errors.Length; i++)
                {
                    double sum = 0.0;

                    // Back-propagate: weighted sum of the next layer's error terms
                    // through the weights feeding from neuron i.
                    for (int k = 0; k < nextErrors.Length; k++)
                    {
                        sum += nextErrors[k] * layerNext.Neurons[k].Weights[i];
                    }

                    errors[i] = sum * function.Derivative2Function(layerOutputs[i]);
                }
            }

            return(sumOfSquaredErrors);
        }
Example #2
0
        /// <summary>
        ///   Computes the error-gradient term of each neuron via backpropagation,
        ///   storing the results into <c>_neuronErrors</c>, and returns the network
        ///   error for the given target vector.
        /// </summary>
        /// <param name="desiredOutput">Desired output vector for the output layer.</param>
        /// <returns>Half the sum of squared output errors (E = 1/2 * sum(e^2)).</returns>
        protected virtual double CalculateError(double[] desiredOutput)
        {
            double error       = 0;
            int    layersCount = Layers.Length;

            // assume, that all neurons of the network have the same activation function
            IActivationFunction function = Layers[0].Neurons[0].ActivationFunction;

            // calculate error values for the last layer first
            var layer = Layers[layersCount - 1] as ActivationLayerRProp;

            // Fail loudly instead of silently: the original's 'i < layer?.Neurons.Length'
            // guard made the loop a no-op when the cast failed, so the method would
            // quietly return 0 error without computing any gradients.
            if (layer == null)
            {
                throw new System.InvalidOperationException(
                    "The output layer is not an ActivationLayerRProp.");
            }

            double[] layerDerivatives = _neuronErrors[layersCount - 1];

            for (int i = 0; i < layer.Neurons.Length; i++)
            {
                double output = layer.Neurons[i].Output;

                double e = output - desiredOutput[i];

                // dE/dnet at the output layer: (output - desired) * f'(output)
                layerDerivatives[i] = e * function.Derivative2Function(output);
                error += e * e;
            }


            // calculate error values for other layers, propagating backwards
            for (int j = layersCount - 2; j >= 0; j--)
            {
                layer            = Layers[j] as ActivationLayerRProp;
                layerDerivatives = _neuronErrors[j];

                var      layerNext       = Layers[j + 1] as ActivationLayerRProp;
                double[] nextDerivatives = _neuronErrors[j + 1];

                // Guard both casts: the original dereferenced them unchecked,
                // turning a wrong layer type into a NullReferenceException.
                if (layer == null || layerNext == null)
                {
                    throw new System.InvalidOperationException(
                        "All network layers must be ActivationLayerRProp instances.");
                }

                // for all neurons of the layer
                for (int i = 0, n = layer.Neurons.Length; i < n; i++)
                {
                    // Weighted sum of the next layer's error terms through the
                    // weights feeding from neuron i. A plain loop avoids the
                    // per-neuron LINQ delegate/enumerator allocation the original
                    // performed inside this hot path (its lambda parameter was
                    // unused anyway — it re-indexed layerNext.Neurons[k] directly).
                    double sum = 0.0;
                    for (int k = 0; k < nextDerivatives.Length; k++)
                    {
                        sum += nextDerivatives[k] * layerNext.Neurons[k].Weights[i];
                    }

                    layerDerivatives[i] = sum * function.Derivative2Function(layer.Neurons[i].Output);
                }
            }

            // E = 1/2 * sum(e^2), consistent with using e (not 2e) in the gradient above
            return(error / 2.0);
        }