Example #1
        [Fact]
        public void TestAutoGradMode()
        {
            var x = FloatTensor.RandomN(new long[] { 2, 3 }, requiresGrad: true);

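            // With autograd disabled, calling Backward() on anything computed
            // inside this scope is expected to fail.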
            using (var mode = new AutoGradMode(false))
            {
                Assert.False(AutoGradMode.IsAutogradEnabled());
                var sum = x.Sum();
                Assert.Throws<ExternalException>(() => sum.Backward());
                //var grad = x.Grad();
                //Assert.True(grad.Handle == IntPtr.Zero);
            }
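            // With autograd re-enabled, Backward() populates x.Grad(); since the
            // output is a plain sum, d(sum)/dx is 1 for every element of x.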
            using (var mode = new AutoGradMode(true))
            {
                Assert.True(AutoGradMode.IsAutogradEnabled());
                var sum = x.Sum();
                sum.Backward();
                var grad = x.Grad();
                Assert.False(grad.Handle == IntPtr.Zero);
                var data = grad.Data<float>();
                for (int i = 0; i < 2 * 3; i++)
                {
                    Assert.Equal(1.0, data[i]);
                }
            }
        }
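AutoGradMode is disposable, and disposing the scope restores the previous autograd state; that is why the second block above works after the first. The same scope is the natural way to run forward-only (inference) code. A minimal sketch, reusing only the APIs from this example (the InferenceSketch name is made up for illustration):

        public static void InferenceSketch()
        {
            var x = FloatTensor.RandomN(new long[] { 2, 3 }, requiresGrad: true);

            // Forward-only work: no graph is recorded while the scope is active.
            using (var noGrad = new AutoGradMode(false))
            {
                var sum  = x.Sum();
                var data = sum.Data<float>();   // reading values is fine; only Backward() would throw
            }
            // The previous autograd state is restored here, on Dispose.
        }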
Example #2
        [Fact]
        public void TestCustomModuleWithInPlaceModification()
        {
            var param  = Float32Tensor.RandomN(new long[] { 1000, 100 });
            var module = new TestModule("test", param, true);

            Assert.Equal(1000, module.GetParameter("test").Shape[0]);
            Assert.Equal(100, module.GetParameter("test").Shape[1]);

            using (var grad = new AutoGradMode(false))
            {
                param.TransposeInPlace(0, 1);
            }
            Assert.Equal(100, module.GetParameter("test").Shape[0]);
            Assert.Equal(1000, module.GetParameter("test").Shape[1]);
            Assert.Equal(100, param.Shape[0]);
            Assert.Equal(1000, param.Shape[1]);
        }
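The snippet above does not include TestModule itself. A hypothetical sketch of what such a module might look like — the CustomModule base class, the Parameter type, and the constructor signature are all assumptions, not taken from the snippet; the key point is only that the module registers the caller's tensor without copying it, which is why the in-place transpose is visible through GetParameter:

        // Hypothetical sketch (base class and constructor are assumptions):
        private class TestModule : CustomModule
        {
            public TestModule(string name, TorchTensor tensor, bool withGrad)
                : base(name, new Parameter(name, tensor, withGrad))   // registers, does not copy
            {
            }

            public override TorchTensor Forward(TorchTensor input)
            {
                // Not exercised by the shape test above.
                throw new NotImplementedException();
            }
        }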
Example #3
        [Fact]
        public void TestTrainingWithDropout()
        {
            var lin1 = Linear(1000, 100);
            var lin2 = Linear(100, 10);
            var seq  = Sequential(("lin1", lin1), ("relu1", ReLU()), ("drop1", Dropout(0.1)), ("lin2", lin2));

            var x = Float32Tensor.randn(new long[] { 64, 1000 });
            var y = Float32Tensor.randn(new long[] { 64, 10 });

            float learning_rate = 0.00004f;
            var   loss          = mse_loss(NN.Reduction.Sum);

            float initialLoss = loss(seq.forward(x), y).ToSingle();
            float finalLoss   = float.MaxValue;

            for (int i = 0; i < 10; i++)
            {
                var eval    = seq.forward(x);
                var output  = loss(eval, y);
                var lossVal = output.ToSingle();

                finalLoss = lossVal;

                seq.ZeroGrad();

                output.backward();

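                // Manual SGD step: perform the in-place parameter updates inside
                // a no-grad scope so the writes themselves are not recorded.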
                using (var noGrad = new AutoGradMode(false))
                {
                    foreach (var param in seq.parameters())
                    {
                        var grad   = param.grad();
                        var update = grad.mul(learning_rate.ToScalar());
                        param.sub_(update);
                    }
                }
            }

            Assert.True(finalLoss < initialLoss);
        }
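Note that Dropout(0.1) is only active while the module is in training mode. A short sketch of checking that, assuming the lowercase API of this example also exposes PyTorch-style train()/eval() switches (an assumption, not shown in the snippet):

            // Assumed API: eval() makes dropout a no-op, so two forward passes
            // over the same input should produce identical outputs.
            seq.eval();
            var out1 = seq.forward(x);
            var out2 = seq.forward(x);
            // out1 and out2 should now be equal element-wise.
            seq.train();   // switch back before resuming the training loop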
Example #4
        [Fact]
        public void TestTraining()
        {
            var lin1 = Linear(1000, 100);
            var lin2 = Linear(100, 10);
            var seq  = Sequential(("lin1", lin1), ("relu1", Relu()), ("lin2", lin2));

            var x = Float32Tensor.RandomN(new long[] { 64, 1000 });
            var y = Float32Tensor.RandomN(new long[] { 64, 10 });

            float learning_rate = 0.00004f;
            float prevLoss      = float.MaxValue;
            var   loss          = MSE(NN.Reduction.Sum);

            for (int i = 0; i < 10; i++)
            {
                var eval    = seq.Forward(x);
                var output  = loss(eval, y);
                var lossVal = output.ToSingle();

                Assert.True(lossVal < prevLoss);
                prevLoss = lossVal;

                seq.ZeroGrad();

                output.Backward();

                using (var noGrad = new AutoGradMode(false))
                {
                    foreach (var param in seq.GetParameters())
                    {
                        var grad   = param.Grad();
                        var update = grad.Mul(learning_rate.ToScalar());
                        param.SubInPlace(update);
                    }
                }
            }
        }
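The manual update loop can usually be replaced with a built-in optimizer. A sketch of the same training step in that style — the NN.Optimizer.SGD factory and its ZeroGrad/Step members are assumptions here, not taken from the snippet:

            // Hypothetical optimizer-based variant of the loop above:
            var optimizer = NN.Optimizer.SGD(seq.GetParameters(), learning_rate);

            for (int i = 0; i < 10; i++)
            {
                var output = loss(seq.Forward(x), y);

                optimizer.ZeroGrad();   // clear gradients from the previous step
                output.Backward();      // accumulate fresh gradients
                optimizer.Step();       // apply the update to every parameter
            }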
Example #5
        /// <summary>
        /// Returns true if grad mode is currently enabled.
        /// </summary>
        /// <returns><c>true</c> if autograd is currently enabled; otherwise, <c>false</c>.</returns>
        public static bool is_grad_enabled() => AutoGradMode.IsAutogradEnabled();
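A short usage sketch, pairing this helper with the AutoGradMode scope from the earlier examples:

        using (var noGrad = new AutoGradMode(false))
        {
            Assert.False(is_grad_enabled());   // disabled inside the scope
        }
        Assert.True(is_grad_enabled());        // previous state restored on Dispose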