Example no. 1
0
        /// <summary>
        /// Computes the error gradient for the layer at <paramref name="layerIndex"/> (in place,
        /// into <paramref name="layerGradient"/>) and applies a gradient-descent weight update.
        /// </summary>
        /// <param name="layerGradient">Buffer receiving this layer's per-neuron deltas.</param>
        /// <param name="learningRate">Step size for the weight update.</param>
        /// <param name="layerIndex">Index of the layer being processed (last layer = output).</param>
        /// <param name="prevWeight">
        /// On entry: pre-update weights of the next (deeper) layer, used for backpropagation.
        /// On exit: a deep copy of this layer's pre-update weights, for the next call.
        /// </param>
        /// <param name="inputResults">Target values; required only when processing the output layer.</param>
        /// <param name="input">Network input vector; required only when processing layer 0.</param>
        /// <param name="prevLayerGradient">Deltas of the next (deeper) layer; required for hidden layers.</param>
        private void FindGradientAndUpdateWeights(
            double[] layerGradient,
            double learningRate,
            int layerIndex,
            ref double[][] prevWeight,
            double[] inputResults      = null,
            double[] input             = null,
            double[] prevLayerGradient = null)
        {
            bool isOutputLayer = layerIndex == LayersCount - 1;

            if (isOutputLayer)
            {
                // Output-layer delta: (prediction - target) * f'(weighted sum).
                for (int neuron = 0; neuron < OutputLayer.NeuronsCount; ++neuron)
                {
                    var error = OutputLayer.OutputNonMatrix[neuron] - inputResults[neuron];
                    layerGradient[neuron] = error
                        * OutputLayer.ActivateFunctionDerivative(OutputLayer.SumOutputNonMatrix[neuron]);
                }
            }
            else
            {
                // Hidden-layer delta: backpropagate the deeper layer's deltas through
                // its pre-update weights, then scale by this layer's activation derivative.
                var layer     = Layers[layerIndex];
                var nextLayer = Layers[layerIndex + 1];
                for (int neuron = 0; neuron < layer.NeuronsCount; ++neuron)
                {
                    double accumulated = .0;
                    for (int next = 0; next < nextLayer.NeuronsCount; ++next)
                    {
                        accumulated += prevLayerGradient[next] * prevWeight[neuron][next];
                    }
                    layerGradient[neuron] = accumulated
                        * layer.ActivateFunctionDerivative(layer.SumOutputNonMatrix[neuron]);
                }
            }

            // Snapshot this layer's weights BEFORE mutating them; the caller feeds this
            // snapshot back in as prevWeight when processing the next (shallower) layer.
            prevWeight = Layers[layerIndex].Weights.DeepCopy();

            // Weight update: w[i][j] -= lr * delta[j] * activation[i], where the incoming
            // activations are the previous layer's outputs, or the raw input for layer 0.
            var incoming = layerIndex != 0 ? Layers[layerIndex - 1].OutputNonMatrix : input;
            for (int row = 0; row < Layers[layerIndex].WeightRowsCount; ++row)
            {
                for (int col = 0; col < Layers[layerIndex].WeightColumnsCount; ++col)
                {
                    Layers[layerIndex].Weights[row][col] -= learningRate * layerGradient[col] * incoming[row];
                }
            }
        }
Example no. 2
0
        /// <summary>
        /// Computes the output layer's error gradient without mutating any network state.
        /// </summary>
        /// <param name="inputResults">Target values, one per output neuron.</param>
        /// <returns>
        /// A new array of per-neuron deltas: (prediction - target) * f'(weighted sum).
        /// </returns>
        public double[] GetOutputLayerGradient(double[] inputResults)
        {
            var gradient = new double[OutputLayer.NeuronsCount];

            for (int neuron = 0; neuron < gradient.Length; ++neuron)
            {
                var error = OutputLayer.OutputNonMatrix[neuron] - inputResults[neuron];
                gradient[neuron] = error
                    * OutputLayer.ActivateFunctionDerivative(OutputLayer.SumOutputNonMatrix[neuron]);
            }

            return gradient;
        }