public void DropoutLayer_ForwardTestPhase()
        {
            // In the test (inference) phase dropout must act as identity:
            // every value it lets through is passed along unscaled.
            Context.Instance.Phase = PhaseType.Test;

            var layer = new DropoutLayer();
            layer.Setup(bottom, top);
            layer.Forward(bottom, top);

            Assert.Equal(bottom.Count, top.Count);

            using (var topCpu = top.OnCpu())
            using (var bottomCpu = bottom.OnCpu())
            {
                // Each non-zero output element must equal the matching input element.
                for (int i = 0; i < bottom.Count; i++)
                {
                    if (MathHelpers.Equality(topCpu.DataAt(i), 0))
                        continue;

                    Assert.True(MathHelpers.Equality(topCpu.DataAt(i), bottomCpu.DataAt(i)));
                }
            }
        }
        public void DropoutLayer_MultipleBackwardsPasses()
        {
            // The dropout mask is fixed by Forward; repeated Backward calls with
            // the same delta must therefore produce identical gradients.
            const int fanIn = 5;
            const int batchSize = 1;
            var random = new Random(232);

            var sut = new DropoutLayer(0.5);
            sut.Initialize(5, 1, 1, batchSize, Initialization.GlorotUniform, random);

            var input = Matrix<float>.Build.Random(batchSize, fanIn, random.Next());
            sut.Forward(input);

            var delta = Matrix<float>.Build.Dense(batchSize, fanIn, 1.0f);

            // Snapshot the first backward result as the reference gradient.
            var expected = Matrix<float>.Build.Dense(batchSize, fanIn);
            sut.Backward(delta).CopyTo(expected);

            for (int pass = 0; pass < 20; pass++)
            {
                Assert.AreEqual(expected, sut.Backward(delta));
            }
        }
        public void DropoutLayer_ForwardTrainPhase(double ratio)
        {
            // Training phase: surviving activations are scaled by 1/(1-ratio)
            // (inverted dropout) and the empirical drop rate should match the
            // configured ratio within statistical error.
            Context.Instance.Phase = PhaseType.Train;

            var layer = new DropoutLayer(new DropoutLayerConfiguration(ratio));
            layer.Setup(bottom, top);
            layer.Forward(bottom, top);

            Assert.Equal(bottom.Count, top.Count);

            using (var topCpu = top.OnCpu())
            using (var bottomCpu = bottom.OnCpu())
            {
                double scale = 1f / (1f - layer.Parameters.Ratio);
                int count = bottom.Count;
                int kept = 0;

                for (int i = 0; i < count; i++)
                {
                    if (MathHelpers.Equality(topCpu.DataAt(i), 0))
                        continue;

                    kept++;
                    Assert.True(MathHelpers.Equality(topCpu.DataAt(i), bottomCpu.DataAt(i) * scale));
                }

                // Observed drop fraction must lie within a ~95% confidence
                // interval (1.96 standard errors) of the requested ratio.
                double stdError = Math.Sqrt(ratio * (1 - ratio) / count);
                double droppedFraction = 1.0d - ((double)kept / count);

                Assert.True(MathHelpers.Equality(ratio, droppedFraction, 1.96 * stdError));
            }
        }
// Example #4 (0)
        public void ForwardTest1()
        {
            // With Session(false) the original assertion implies inference-mode
            // dropout scales the weights so that sum(output) == sum(input) * Probability.
            Shape        shape = new Shape(new int[] { -1, 10000 });
            DropoutLayer layer = new DropoutLayer(shape, 0.5);

            for (int i = 1; i <= 3; i++)
            {
                Session session = new Session(false);

                Tensor x = new Tensor(null, shape.Reshape(0, i));
                x.Randomize();

                IList <Tensor> xs = new[] { x };
                IList <Tensor> ys = layer.Forward(session, xs);

                float expected = x.Weights.Sum() * layer.Probability;

                // Fix: exact float equality is fragile here — scaling each of the
                // 10000*i terms before summing rounds differently than scaling the
                // sum once, so compare with a small absolute tolerance.
                Assert.AreEqual(expected, ys[0].Weights.Sum(), 1e-2f);
            }
        }
        public void DropoutLayer_Forward()
        {
            // The RNG is seeded, so the dropout mask — and hence the forward
            // output — is deterministic and can be pinned to exact values.
            const int fanIn = 5;
            const int batchSize = 1;
            var random = new Random(232);

            var sut = new DropoutLayer(0.5);
            sut.Initialize(5, 1, 1, batchSize, Initialization.GlorotUniform, random);

            var input = Matrix<float>.Build.Random(batchSize, fanIn, random.Next());

            var actual = sut.Forward(input);

            Trace.WriteLine(string.Join(", ", actual.ToColumnMajorArray()));

            var expected = Matrix<float>.Build.Dense(
                batchSize, fanIn,
                new float[] { 0.9177308f, 1.495695f, -0.07688076f, 0f, -2.932818f });

            MatrixAsserts.AreEqual(expected, actual);
        }
        public void DropoutLayer_ForwardTestPhase()
        {
            // Dropout in the test phase is a pass-through: wherever the output
            // is non-zero it must match the input exactly.
            Context.Instance.Phase = PhaseType.Test;

            var layer = new DropoutLayer();
            layer.Setup(bottom, top);
            layer.Forward(bottom, top);

            Assert.Equal(bottom.Count, top.Count);

            using (var topCpu = top.OnCpu())
            using (var bottomCpu = bottom.OnCpu())
            {
                for (int index = 0; index < bottom.Count; index++)
                {
                    bool outputIsZero = MathHelpers.Equality(topCpu.DataAt(index), 0);
                    if (!outputIsZero)
                    {
                        Assert.True(MathHelpers.Equality(topCpu.DataAt(index), bottomCpu.DataAt(index)));
                    }
                }
            }
        }
        public void DropoutLayer_Backward()
        {
            // Seeded RNG fixes both the dropout mask and the random delta, so
            // the backward gradient can be asserted against exact values.
            const int fanIn = 5;
            const int batchSize = 1;
            var random = new Random(232);

            var sut = new DropoutLayer(0.5);
            sut.Initialize(5, 1, 1, batchSize, Initialization.GlorotUniform, random);

            var input = Matrix<float>.Build.Random(batchSize, fanIn, random.Next());
            sut.Forward(input);

            var delta = Matrix<float>.Build.Random(batchSize, fanIn, random.Next());

            var actual = sut.Backward(delta);

            Trace.WriteLine(string.Join(", ", actual.ToColumnMajorArray()));

            var expected = Matrix<float>.Build.Dense(
                batchSize, fanIn,
                new float[] { -1.676851f, -1.938897f, -1.108109f, 0f, -0.4058239f });

            MatrixAsserts.AreEqual(expected, actual);
        }
// Example #8 (0)
        public void ForwardBackwardTest2()
        {
            Shape shape = new Shape(new int[] { -1, 10000 });
            DropoutLayer layer = new DropoutLayer(shape, 0.5);

            for (int batch = 1; batch <= 3; batch++)
            {
                Session session = new Session(true);

                Tensor x = new Tensor(null, shape.Reshape(0, batch));
                x.Randomize();

                Tensor y = layer.Forward(session, new[] { x })[0];

                // Randomized input is roughly zero-mean, so the output sum
                // should stay near zero (within an absolute tolerance of 1).
                Assert.AreEqual(0.0f, y.Weights.Sum(), 1.0f);

                // Roughly Probability of the units should be zeroed out;
                // allow a 2% (length/50) deviation.
                int expectedZeros = (int)(y.Length * layer.Probability);
                int actualZeros = y.Weights.Count(w => w == 0.0f);
                Assert.AreEqual(expectedZeros, actualZeros, y.Length / 50);

                // unroll the graph
                y.SetGradient(1.0f);
                session.Unroll();

                // Gradient is 1 where the unit survived and 0 where it was dropped.
                float[] expectedGradient = y.Weights.Select(w => w == 0.0f ? 0.0f : 1.0f).ToArray();
                CollectionAssert.AreEqual(expectedGradient, x.Gradient);
            }
        }
        public void DropoutLayer_ForwardTrainPhase(double ratio)
        {
            // Training phase: verifies inverted-dropout scaling of survivors and
            // that the measured drop rate is statistically consistent with ratio.
            Context.Instance.Phase = PhaseType.Train;

            var config = new DropoutLayerConfiguration(ratio);
            var layer = new DropoutLayer(config);
            layer.Setup(bottom, top);
            layer.Forward(bottom, top);

            Assert.Equal(bottom.Count, top.Count);

            using (var topCpu = top.OnCpu())
            using (var bottomCpu = bottom.OnCpu())
            {
                // Survivors are scaled up by 1/(1-ratio) so the expected
                // activation magnitude is preserved.
                double scale = 1f / (1f - layer.Parameters.Ratio);

                int total = bottom.Count;
                int survivors = 0;
                for (int i = 0; i < total; i++)
                {
                    bool dropped = MathHelpers.Equality(topCpu.DataAt(i), 0);
                    if (dropped)
                    {
                        continue;
                    }

                    survivors++;
                    Assert.True(MathHelpers.Equality(topCpu.DataAt(i), bottomCpu.DataAt(i) * scale));
                }

                // The observed drop rate must fall within 1.96 standard errors
                // (~95% confidence) of the configured ratio.
                double stdError = Math.Sqrt(ratio * (1 - ratio) / total);
                double observedRatio = 1.0d - ((double)survivors / total);

                Assert.True(MathHelpers.Equality(ratio, observedRatio, 1.96 * stdError));
            }
        }