// Example #1
        ////////////////////////////////////////////////////////////////////////////////////////////////////
        /// <summary>
        ///     Backward CPU. Accumulates the parameter gradients (Beta = shift, Gamma = scale) from the
        ///     incoming gradient in <paramref name="y"/>.Grad, then propagates the gradient back into
        ///     <paramref name="x"/>.Grad. When <c>IsTrain</c> is set the batch-statistics path is used;
        ///     otherwise the fixed-statistics path also writes gradients for the running averages
        ///     (AvgMean / AvgVar).
        /// </summary>
        ///
        /// <param name="y">    Output NdArray; its Grad holds the upstream gradient. </param>
        /// <param name="x">    Input NdArray; its Grad receives the propagated gradient. </param>
        ////////////////////////////////////////////////////////////////////////////////////////////////////

        private void BackwardCpu([CanBeNull] NdArray y, [CanBeNull] NdArray x)
        {
            // Reset parameter gradients before accumulating over the batch.
            Beta.ClearGrad();
            Gamma.ClearGrad();

            int batchCount = y.BatchCount;

            // dBeta[c]  = sum over batch of dy
            // dGamma[c] = sum over batch of dy * xhat
            // NOTE(review): y.Grad is indexed with y.Length as the batch stride while Xhat and x.Grad
            // use ChannelSize — presumably ChannelSize == y.Length here; confirm against the layer setup.
            for (int ch = 0; ch < ChannelSize; ch++)
            {
                for (int b = 0; b < batchCount; b++)
                {
                    Real gy = y.Grad[b * y.Length + ch];

                    Beta.Grad[ch]  += gy;
                    Gamma.Grad[ch] += gy * Xhat[b * ChannelSize + ch];
                }
            }

            if (Verbose)
            {
                RILogManager.Default?.ViewerSendWatch("Learning", (IsTrain ? "Yes" : "No"));
            }

            if (IsTrain)
            {
                // Training: backpropagate through the batch mean/variance statistics.
                for (int ch = 0; ch < ChannelSize; ch++)
                {
                    Real gammaOverStd = Gamma.Data[ch] / Std[ch];

                    for (int b = 0; b < batchCount; b++)
                    {
                        // Per-sample correction combining the accumulated dGamma and dBeta terms.
                        Real correction = (Xhat[b * ChannelSize + ch] * Gamma.Grad[ch] + Beta.Grad[ch]) / batchCount;

                        x.Grad[b * ChannelSize + ch] += gammaOverStd * (y.Grad[b * y.Length + ch] - correction);
                    }
                }
            }
            else
            {
                // Inference: statistics are fixed, so the input gradient is a plain scale by
                // gamma/std; also emit gradients for the running mean/variance parameters.
                for (int ch = 0; ch < ChannelSize; ch++)
                {
                    Real gammaOverStd = Gamma.Data[ch] / Std[ch];

                    AvgMean.Grad[ch] = -gammaOverStd * Beta.Grad[ch];
                    AvgVar.Grad[ch]  = -0.5 * Gamma.Data[ch] / AvgVar.Data[ch] * Gamma.Grad[ch];

                    for (int b = 0; b < batchCount; b++)
                    {
                        x.Grad[b * ChannelSize + ch] += gammaOverStd * y.Grad[b * y.Length + ch];
                    }
                }
            }
        }