public void GetGradientsForWeights()
        {
            //Arrange
            float[] inputSignal = new float[]
            {
                0.95F, 0.55F, 0.90F
            };

            float[] deltasErrorThisLayer = new float[]
            {
                0.77F
            };

            float[][] expected = new float[][]
            {
                new float[] { 0.73149997F },
                new float[] { 0.4235F },
                new float[] { 0.692999959F }
            };

            //Act
            var actual = Backpropagation.GetGradientsForWeights(inputSignal, deltasErrorThisLayer);

            //Assert
            for (int i = 0; i < expected.Length; i++)
            {
                for (int j = 0; j < expected[i].Length; j++)
                {
                    Assert.AreEqual(expected[i][j], actual[i][j]);
                }
            }
        }
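The expected values in the test above are exactly the outer product of inputSignal and deltasErrorThisLayer (for instance 0.95 * 0.77 = 0.7315), so the method presumably multiplies each input signal by each neuron's delta error. The following is a minimal sketch inferred from that test data; the method name and signature match the calls above, but the body is an assumption, not the library's actual implementation.

        // Hypothetical sketch: gradient[synapse][neuron] = inputSignal[synapse] * deltasErrorThisLayer[neuron].
        // Inferred from the expected values in the test above (0.95F * 0.77F = 0.7315F, etc.).
        public static float[][] GetGradientsForWeights(float[] inputSignal, float[] deltasErrorThisLayer)
        {
            var gradients = new float[inputSignal.Length][];

            for (int synapse = 0; synapse < inputSignal.Length; synapse++)
            {
                gradients[synapse] = new float[deltasErrorThisLayer.Length];

                for (int neuron = 0; neuron < deltasErrorThisLayer.Length; neuron++)
                {
                    gradients[synapse][neuron] = inputSignal[synapse] * deltasErrorThisLayer[neuron];
                }
            }

            return gradients;
        }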
Example #2
        protected virtual void UpdateWeightsForNeurons(LayerEventArgs layerEventArgs)
        {
            // Gradients for this layer: the input signals of the active dataset
            // combined with the delta errors of the layer's neurons.
            _GradientsForWeights = Backpropagation.GetGradientsForWeights(
                _InputSignals[(int)layerEventArgs.NumberOfActiveDataset],
                _DeltasErrorsOfNeurons);

            // Turn the gradients into weight updates using the learning rate,
            // momentum rate and the selected learning optimization.
            _UpdatesForWeights = Backpropagation.GetUpdatesForWeights(
                _GradientsForWeights,
                _UpdatesForWeights,
                layerEventArgs.LearningRate,
                layerEventArgs.MomentumRate,
                layerEventArgs.LearningOptimizing);

            // Apply the computed updates to every weight in the layer.
            for (int synapse = 0; synapse < _SynapsesCount; synapse++)
            {
                for (int neuron = 0; neuron < _NeuronsCount; neuron++)
                {
                    _NeuronWeights[synapse][neuron] += _UpdatesForWeights[synapse][neuron];
                }
            }
        }
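Example #2 chains GetGradientsForWeights with GetUpdatesForWeights and then adds the resulting updates to the weights. A common way to compute such updates is classical momentum, sketched below; the exact formula, the sign convention, and the behaviour of the LearningOptimizing option are assumptions here, not the library's confirmed implementation (the sketch omits that parameter entirely).

        // Hypothetical sketch of a momentum-style weight update, assuming
        // update = learningRate * gradient + momentumRate * previousUpdate.
        // The real GetUpdatesForWeights also takes a LearningOptimizing option,
        // whose behaviour is not shown in the snippets above.
        public static float[][] GetUpdatesForWeights(
            float[][] gradients,
            float[][] previousUpdates,
            float learningRate,
            float momentumRate)
        {
            var updates = new float[gradients.Length][];

            for (int synapse = 0; synapse < gradients.Length; synapse++)
            {
                updates[synapse] = new float[gradients[synapse].Length];

                for (int neuron = 0; neuron < gradients[synapse].Length; neuron++)
                {
                    // Fall back to zero momentum on the first pass, when no previous updates exist.
                    float previous = previousUpdates != null ? previousUpdates[synapse][neuron] : 0F;

                    updates[synapse][neuron] =
                        learningRate * gradients[synapse][neuron] + momentumRate * previous;
                }
            }

            return updates;
        }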