Example #1
        private void NumericGradCheckInternal(Model model)
        {
            const int passCount = 10;

            for (int i = 0; i < passCount; i++)
            {
                model.FirstInputValue = Utils.GetRandomNumber(_rnd, 0.05, 10);
                Matrix target = new Matrix(2 * model.FirstInputValue);

                // Forward pass, then backpropagate the loss gradients through both layers.
                model.ForwardPass(target);

                model.LossLayer.ComputeLossGradients(target);
                model[1].ComputeGradient();
                model[0].ComputeGradient();

                double fa = model[0].Weights.Extra[0, 0]; // analytical gradient dL/dw from the backward pass

                // Numerical check: evaluate the loss at w + H and w - H (central difference).
                double initWeight = model[0].Weights.Primal[0, 0];
                model[0].Weights.Primal[0, 0] = initWeight + GradUtils.H;
                model.ForwardPass(target);
                double f1Val = model.FirstLossValue;

                model[0].Weights.Primal[0, 0] = initWeight - GradUtils.H;
                model.ForwardPass(target);
                double f2Val = model.FirstLossValue;

                // Restore the original weight before asserting and applying the analytic gradient.
                model[0].Weights.Primal[0, 0] = initWeight;

                Assert.IsTrue(GradUtils.ChechAnalGrad(fa, f1Val, f2Val), "Failed on try " + i);
                model[1].ApplyGradient();
                model[0].ApplyGradient();
            }
        }
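
These checks hinge on GradUtils.H and GradUtils.ChechAnalGrad, whose implementations are not shown here. A minimal sketch of what such a helper could look like, assuming it compares the analytical gradient against the central-difference estimate (f1Val - f2Val) / (2 * H) with a relative tolerance; the values of H and the tolerance below are assumptions, not taken from the source:

using System;

// Hypothetical sketch of the gradient-check helper; the project's actual GradUtils may differ.
public static class GradUtils
{
    public const double H = 1e-5;            // finite-difference step (assumed value)
    private const double Tolerance = 1e-4;   // acceptable relative error (assumed value)

    // fa: analytical gradient; f1Val / f2Val: losses evaluated at w + H and w - H.
    public static bool ChechAnalGrad(double fa, double f1Val, double f2Val)
    {
        double numeric = (f1Val - f2Val) / (2.0 * H);
        double scale   = Math.Max(Math.Abs(fa) + Math.Abs(numeric), 1e-12);
        return Math.Abs(fa - numeric) / scale < Tolerance;
    }
}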
Example #2
        private static void CheckSquareInternal(double h, double t)
        {
            SquareError errorOp = new SquareError();
            double      fa1     = errorOp.ComputeLossGradient(h, t); // analytical gradient of the loss w.r.t. h

            // Numerical estimate via a central difference at h + H and h - H.
            double f1Val = errorOp.ForwardPass(h + GradUtils.H, t);
            double f2Val = errorOp.ForwardPass(h - GradUtils.H, t);
            bool   res   = GradUtils.ChechAnalGrad(fa1, f1Val, f2Val);

            Assert.IsTrue(res);
        }
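
For the loss itself, this test only needs a forward value and its analytical derivative with respect to the prediction h. A minimal sketch of a SquareError that satisfies the two calls above, assuming the 0.5 * (h - t)^2 convention (the 0.5 factor and anything beyond those two methods are assumptions, not taken from the source):

// Hypothetical sketch of a squared-error loss; the real SquareError may differ.
public class SquareError
{
    // Loss value, assuming the 0.5 * (h - t)^2 convention.
    public double ForwardPass(double h, double t)
    {
        double diff = h - t;
        return 0.5 * diff * diff;
    }

    // Analytical gradient of the loss with respect to the prediction h: (h - t).
    public double ComputeLossGradient(double h, double t)
    {
        return h - t;
    }
}

With this pairing the central-difference estimate in CheckSquareInternal agrees with the analytical value up to floating-point error, since a central difference is exact for a quadratic, so the assertion passes for any reasonable tolerance.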