        public void DropoutSerialization()
        {
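            // Round-trip the layer's hyperparameters: GetData() captures the input geometry and
            // DropProbability, and LayerBase<double>.FromData() should rebuild an equivalent layer.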
            var layer = new DropoutLayer {
                DropProbability = 0.1
            };

            layer.Init(28, 24, 1);
            var data = layer.GetData();

            Assert.AreEqual(28, data["InputWidth"]);
            Assert.AreEqual(24, data["InputHeight"]);
            Assert.AreEqual(1, data["InputDepth"]);

            var deserialized = LayerBase<double>.FromData(data) as DropoutLayer;

            Assert.IsNotNull(deserialized);
            Assert.AreEqual(28, deserialized.InputWidth);
            Assert.AreEqual(24, deserialized.InputHeight);
            Assert.AreEqual(1, deserialized.InputDepth);
            Assert.AreEqual(layer.OutputWidth, deserialized.OutputWidth);
            Assert.AreEqual(layer.OutputHeight, deserialized.OutputHeight);
            Assert.AreEqual(layer.OutputDepth, deserialized.OutputDepth);

            Assert.AreEqual(layer.DropProbability, deserialized.DropProbability);
        }

        public void NotLearning()
        {
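            // Forward pass with the training flag set to false: dropout should act as a pass-through,
            // so n ones in should come back as n ones, giving an average of exactly 1.0.
            // n is large enough that any accidental dropping in this mode would show up in the average.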
            var n = 1000000;
            var dropProbability = 0.2;
            var layer           = new DropoutLayer<double>(dropProbability);

            layer.Init(1, 1, n);

            var input  = BuilderInstance.Volume.From(new double[n].Populate(1.0), new Shape(1, 1, n, 1));
            var result = layer.DoForward(input, false);

            var average = result.ToArray().Average();

            Assert.AreEqual(1.0, average); // Let everything go through
        }

        public void Learning()
        {
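            // Forward pass with the training flag set to true: the layer uses inverted dropout, so
            // surviving units are scaled by 1 / (1 - dropProbability) = 1.25 and dropped units are 0.
            // In expectation the output therefore matches the input: (1 - p) * 1 / (1 - p) = 1.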
            var n = 1000000;
            var dropProbability = 0.2;
            var layer           = new DropoutLayer<double>(dropProbability);

            layer.Init(1, 1, n);

            var input  = BuilderInstance.Volume.From(new double[n].Populate(1.0), new Shape(1, 1, n, 1));
            var result = layer.DoForward(input, true);

            var val           = result.ToArray().First(o => o != 0.0);
            var scalingFactor = 1.0 / (1.0 - dropProbability);

            Assert.AreEqual(scalingFactor, val); // Make sure output is scaled during learning

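            // The assertion above pinned the scale at 1 / (1 - p), so a mean output of ~1.0 implies that
            // roughly a fraction p of the units were zeroed; multiplying the mean by dropProbability
            // simply restates that check against dropProbability itself.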
            var average       = result.ToArray().Average();
            var measuredProba = average * dropProbability;

            Assert.AreEqual(dropProbability, measuredProba, 0.001); // Make sure dropout really happened
        }