Example #1
        public void ForwardBackwardTest()
        {
            var soft = new MLStudy.Deep.Softmax();

            soft.PrepareTrain(new TensorOld(3, 4));

            //Simulate N previous forward/backward passes
            for (int i = 0; i < 3; i++)
            {
                var noiseIn    = TensorOld.Rand(3, 4);
                var noiseError = TensorOld.Rand(3, 4);
                soft.Forward(noiseIn);
                soft.Backward(noiseError);
            }

            //The real test starts here; a correct output shows it is unaffected by the earlier passes
            var input  = new TensorOld(new double[] { 7, 9, 1, -1, 2, -7, 2, 4, 7, 8, 4, -1 }, 3, 4);
            var y      = new TensorOld(new double[] { 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0 }, 3, 4); //sample 1 is predicted correctly, samples 2 and 3 are wrong
            var output = soft.Forward(input);
            //The error propagated back when the loss is computed with cross-entropy
            var error = TensorOld.DivideElementWise(y, output) * -1;
            //The result computed by Backward
            var actual = soft.Backward(error);
            //The result derived by hand; since softmax and the loss function are kept as separate layers, in practice they are computed separately
            var expected = output - y;

            //Due to floating-point precision, some values are not exactly equal
            MyAssert.ApproximatelyEqual(expected, actual);
        }
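
For reference, the identity this test verifies is the standard softmax-plus-cross-entropy result (a general derivation, not MLStudy-specific code): with

$$
\hat{y}_i = \frac{e^{z_i}}{\sum_k e^{z_k}}, \qquad L = -\sum_i y_i \log \hat{y}_i,
$$

the chain rule gives

$$
\frac{\partial L}{\partial z_i} = \sum_j \frac{\partial L}{\partial \hat{y}_j}\,\frac{\partial \hat{y}_j}{\partial z_i} = \hat{y}_i - y_i,
$$

using $\sum_j y_j = 1$. That is why expected = output - y even though the error fed into Backward is $-y/\hat{y}$.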
Example #2
        public void ForwardBackwardTest()
        {
            var fl      = new FullLayer(3);
            var weights = TensorOld.Ones(4, 3);
            var bias    = TensorOld.Values(0.5, 1, 3);
            var input   = new TensorOld(new double[] { 1, 2, 3, 4, 5, 6, 7, 8 }, 2, 4);
            var error   = new TensorOld(new double[] { -1, 0.8, 1.5, 1, -1, 1 }, 2, 3);

            var forward         = new TensorOld(new double[] { 10.5, 10.5, 10.5, 26.5, 26.5, 26.5 }, 2, 3);
            var backward        = new TensorOld(new double[] { 1.3, 1.3, 1.3, 1.3, 1, 1, 1, 1 }, 2, 4);
            var biasGradient    = new TensorOld(new double[] { 0, -0.2, 2.5 }, 1, 3);
            var weightsGradient = new TensorOld(new double[,]
            {
                { 4, -4.2, 6.5 },
                { 4, -4.4, 9 },
                { 4, -4.6, 11.5 },
                { 4, -4.8, 14 },
            });

            fl.PrepareTrain(input);
            fl.SetWeights(weights);
            fl.SetBias(bias);
            fl.Forward(input);
            fl.Backward(error);

            Assert.Equal(forward, fl.ForwardOutput);
            Assert.Equal(backward, fl.BackwardOutput);
            MyAssert.ApproximatelyEqual(biasGradient, fl.BiasGradient);
            Assert.Equal(weightsGradient, fl.WeightsGradient);
        }
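
The expected tensors follow from the usual fully-connected layer equations, assuming (as the shapes here suggest) one sample per row:

$$
Y = XW + b, \qquad \frac{\partial L}{\partial X} = E\,W^{\top}, \qquad \frac{\partial L}{\partial W} = X^{\top}E, \qquad \frac{\partial L}{\partial b} = \textstyle\sum_{\text{rows}} E.
$$

With $W$ all ones and $b$ filled with 0.5 (Values(0.5, 1, 3) appears to build a 1x3 tensor of 0.5s), row one of $Y$ is $1+2+3+4+0.5 = 10.5$ in every column; the bias gradient is the column sums of the error ($-1+1 = 0$, $0.8-1 = -0.2$, $1.5+1 = 2.5$); and entry $(1,1)$ of the weights gradient is $1\cdot(-1) + 5\cdot 1 = 4$, matching the values above.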
Example #3
        public void MeanSquareErrorTest()
        {
            var y        = new TensorOld(new double[] { 1, 3, 2, 4, 5, 6 });
            var yHat     = new TensorOld(new double[] { 1.5, 2.6, 2.1, 3.9, 5.3, 6.7 });
            var error    = Functions.MeanSquareError(y, yHat);
            var expected = 0.16833333333333333;

            MyAssert.ApproximatelyEqual(expected, error);
        }
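
The expected value is consistent with the plain $1/n$ mean-squared-error convention (no $1/2$ factor):

$$
\mathrm{MSE} = \frac{1}{n}\sum_{i=1}^{n}(\hat{y}_i - y_i)^2 = \frac{0.5^2 + 0.4^2 + 0.1^2 + 0.1^2 + 0.3^2 + 0.7^2}{6} = \frac{1.01}{6} \approx 0.1683333.
$$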
Example #4
        public void TanhTest()
        {
            var x        = 0;
            var d        = Derivatives.Tanh(x);
            var delta    = 0.0001;
            var expected = (Functions.Tanh(x + delta) - Functions.Tanh(x)) / delta;

            MyAssert.ApproximatelyEqual(expected, d);
        }
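
This test compares the analytic derivative against a forward finite difference. At $x = 0$ both should be close to 1, since

$$
\frac{d}{dx}\tanh(x) = 1 - \tanh^2(x), \qquad \tanh'(0) = 1 \approx \frac{\tanh(\delta) - \tanh(0)}{\delta} \quad \text{for small } \delta.
$$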
Example #5
        public void MeanSquareErrorTest()
        {
            var y        = new TensorOld(new double[] { 1, 3, 2, 4, 5, 6 });
            var yHat     = new TensorOld(new double[] { 1.5, 2.6, 2.1, 3.9, 5.3, 6.7 });
            var loss     = Functions.MeanSquareError(y, yHat);
            var gradient = Derivatives.MeanSquareError(y, yHat);
            var delta    = 0.00001;

            yHat[0] += delta;
            var expected = (Functions.MeanSquareError(y, yHat) - loss) / delta;

            MyAssert.ApproximatelyEqual(expected, gradient[0]);
        }
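
Given the $1/n$ loss convention checked in Example #3, the analytic gradient the finite difference should approximate is

$$
\frac{\partial\,\mathrm{MSE}}{\partial \hat{y}_i} = \frac{2}{n}(\hat{y}_i - y_i), \qquad \frac{\partial\,\mathrm{MSE}}{\partial \hat{y}_0} = \frac{2\,(1.5 - 1)}{6} = \frac{1}{6} \approx 0.1667,
$$

presumably what Derivatives.MeanSquareError returns at index 0.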
Example #6
        public void SoftmaxTest()
        {
            var output   = new double[] { 0.05, 0.15, 0.7, 0.1 };
            var expected = new double[,]
            {
                { 0.0475, -0.0075, -0.035, -0.005 },
                { -0.0075, 0.1275, -0.105, -0.015 },
                { -0.035, -0.105, 0.21, -0.07 },
                { -0.005, -0.015, -0.07, 0.09 }
            };
            var actual = Derivatives.SoftmaxFromOutput(output);

            MyAssert.ApproximatelyEqual(expected, actual);
        }
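
The expected matrix is the softmax Jacobian, which can be evaluated from the softmax output alone (hence the name SoftmaxFromOutput):

$$
J_{ij} = \frac{\partial s_i}{\partial z_j} = s_i(\delta_{ij} - s_j),
$$

e.g. $J_{11} = 0.05\,(1 - 0.05) = 0.0475$ and $J_{12} = -0.05 \cdot 0.15 = -0.0075$, matching the first row of expected.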
Example #7
        public void SoftmaxCrossEntropyTest()
        {
            var y    = new double[] { 0, 1, 0, 0 };
            var yHat = new double[] { 0.1, 0.7, 0.15, 0.05 };
            var der  = Derivatives.CrossEntropy(y, yHat);

            var delta = 0.00001;
            var ce0   = Functions.CrossEntropy(y, yHat);

            yHat[0] += delta;
            var ce1      = Functions.CrossEntropy(y, yHat);
            var expected = (ce1 - ce0) / delta;

            MyAssert.ApproximatelyEqual(expected, der[0], 0.0001);
        }
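
As a sanity check on the expected value: the analytic cross-entropy derivative is

$$
\frac{\partial}{\partial \hat{y}_i}\Bigl(-\sum_k y_k \log \hat{y}_k\Bigr) = -\frac{y_i}{\hat{y}_i},
$$

so der[0] should be 0 because $y_0 = 0$; the finite difference agrees, since perturbing yHat[0] leaves the only nonzero term, $-\log \hat{y}_1$, unchanged.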