Example #1
0
        protected override void BackPropInternal(Tensor outputGradient)
        {
            // Propagate the error backwards through the convolution to the layer inputs.
            Tensor.Conv2DInputsGradient(outputGradient, Kernels, Stride, InputsGradient[0]);
            // Gradient of the loss w.r.t. the kernels (valid padding, same stride as forward pass).
            Tensor.Conv2DKernelsGradient(Inputs[0], outputGradient, Stride, Tensor.PaddingType.Valid, KernelsGradient);

            if (UseBias)
            {
                // Each bias channel receives the output gradient summed over the batch.
                var batchBiasGradient = outputGradient.SumBatches();
                BiasGradient.Add(batchBiasGradient);
            }
        }
Example #2
0
        protected override void BackPropInternal(Tensor outputGradient)
        {
            // For an explanation watch https://www.youtube.com/watch?v=8H2ODPNxEgA&t=898s
            // Each input is responsible for the output error in proportion to the
            // weight it was multiplied by, so dL/dX = W^T * dL/dY.
            var transposedWeights = Weights.Transposed();
            transposedWeights.Mul(outputGradient, InputsGradient[0]);

            // Accumulate dL/dW = dL/dY * X^T, summed over the batch, into WeightsGradient.
            var batchWeightsGradient = outputGradient.Mul(Inputs[0].Transposed()).SumBatches();
            WeightsGradient.Add(batchWeightsGradient, WeightsGradient);
            if (UseBias)
            {
                // Accumulate dL/db: the output gradient summed over the batch.
                var batchBiasGradient = outputGradient.SumBatches();
                BiasGradient.Add(batchBiasGradient, BiasGradient);
            }
        }
Example #3
0
        // Computes the gradients of Weights and Bias for the current mini-batch.
        // error holds dL/dY laid out per sample at errorStartIndex offsets;
        // ForwardInput holds the cached forward-pass input at sampleStartIndex offsets.
        private void ComputeGradient(TensorOld error)
        {
            var inputData = ForwardInput.GetRawValues();
            var errorData = error.GetRawValues();
            var biasData  = BiasGradient.GetRawValues();
            var features  = WeightsGradient.shape[0];

            // The bias gradient depends only on the error, so compute it once per
            // output unit. (Previously it was recomputed and rewritten `features`
            // times from inside the per-feature loop, concurrently from multiple
            // threads — redundant work and redundant shared writes.)
            Parallel.For(0, UnitCount, j =>
            {
                var biasSum = 0d;
                for (int k = 0; k < sampleStartIndex.Length; k++)
                {
                    biasSum += errorData[errorStartIndex[k] + j];
                }
                biasData[j] = biasSum;
            });

            // Weight gradient: dL/dW[i, j] = sum over samples of input[i] * error[j].
            // Parallelize only the outer dimension — nesting Parallel.For inside
            // Parallel.For oversubscribes the thread pool for no benefit.
            Parallel.For(0, features, i =>
            {
                for (int j = 0; j < UnitCount; j++)
                {
                    var weightSum = 0d;
                    for (int k = 0; k < sampleStartIndex.Length; k++)
                    {
                        weightSum += inputData[sampleStartIndex[k] + i] * errorData[errorStartIndex[k] + j];
                    }
                    WeightsGradient[i, j] = weightSum;
                }
            });
        }