Example #1
        public void ReluLayer_ForwardGradientWithLeakyUnits()
        {
            // http://en.wikipedia.org/wiki/Rectifier_(neural_networks)#Leaky_ReLUs

            var config = new ReluLayerConfiguration(0.01f);
            var layer = new ReluLayer(config);
            layer.Setup(bottom, top);
            layer.Forward(bottom, top);

            double slope = layer.Parameters.NegativeSlope;

            Assert.Equal(bottom.Count, top.Count);

            using (var topCpu = top.OnCpu())
            using (var bottomCpu = bottom.OnCpu())
            {
                int count = bottom.Count;
                for (int i = 0; i < count; i++)
                {
                    if (bottomCpu.DataAt(i) <= 0)
                    {
                        Assert.True(topCpu.DataAt(i) >= bottomCpu.DataAt(i) * slope - 0.000001);
                    }
                    else
                    {
                        Assert.True(topCpu.DataAt(i) >= 0.0d);
                        Assert.True(topCpu.DataAt(i) == 0.0d || topCpu.DataAt(i) == bottomCpu.DataAt(i));
                    }
                }
            }
        }
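
For reference, the assertions above follow the leaky ReLU forward rule: f(x) = x for x > 0 and f(x) = negativeSlope * x otherwise. The sketch below is a minimal, illustrative reference computation; LeakyReluForward is a hypothetical helper written for clarity, not part of the library under test.

        // Reference leaky ReLU forward pass mirroring what the test above asserts.
        // Hypothetical helper for illustration only.
        public static float[] LeakyReluForward(float[] bottom, float negativeSlope)
        {
            var top = new float[bottom.Length];
            for (int i = 0; i < bottom.Length; i++)
            {
                // f(x) = x for x > 0, f(x) = negativeSlope * x otherwise
                top[i] = bottom[i] > 0 ? bottom[i] : negativeSlope * bottom[i];
            }
            return top;
        }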
Example #2
        public void ReluLayer_ForwardWithLeakyUnits()
        {
            // http://en.wikipedia.org/wiki/Rectifier_(neural_networks)#Leaky_ReLUs

            var config = new ReluLayerConfiguration(0.01f);
            var layer  = new ReluLayer(config);

            layer.Setup(bottom, top);
            layer.Forward(bottom, top);

            double slope = layer.Parameters.NegativeSlope;

            Assert.Equal(bottom.Count, top.Count);

            using (var topCpu = top.OnCpu())
            using (var bottomCpu = bottom.OnCpu())
            {
                int count = bottom.Count;
                for (int i = 0; i < count; i++)
                {
                    if (bottomCpu.DataAt(i) <= 0)
                    {
                        Assert.True(topCpu.DataAt(i) >= bottomCpu.DataAt(i) * slope - 0.000001);
                    }
                    else
                    {
                        Assert.True(topCpu.DataAt(i) >= 0.0d);
                        Assert.True(topCpu.DataAt(i) == 0.0d || topCpu.DataAt(i) == bottomCpu.DataAt(i));
                    }
                }
            }
        }
Example #3
        public void ReluLayer_BackwardGradientWithLeakyUnits()
        {
            var config = new ReluLayerConfiguration(0.01f);
            var layer = new ReluLayer(config);

            var checker = new GradientChecker(1e-2f, 1e-2f, 1701, 0.0d, 0.01f);
            checker.CheckEltwise(layer, bottom, top);
        }
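
The backward test delegates to GradientChecker, which in the Caffe-style design this port appears to follow compares the layer's analytic gradient against a finite-difference estimate. A minimal sketch of the central-difference idea for an elementwise function follows; NumericGradient, f, x, and eps are illustrative names, not the library's API.

        // Central-difference estimate of df/dx for an elementwise function f,
        // the quantity a Caffe-style gradient checker compares against the
        // layer's analytic backward pass. Illustrative helper only.
        public static float NumericGradient(Func<float, float> f, float x, float eps = 1e-2f)
        {
            return (f(x + eps) - f(x - eps)) / (2 * eps);
        }

        // Example: leaky ReLU with slope 0.01 has derivative 1 for x > 0 and 0.01 for x < 0,
        // so NumericGradient(v => v > 0 ? v : 0.01f * v, -2.0f) is approximately 0.01f.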