コード例 #1
0
        public void ReluLayer_ForwardWithLeakyUnits()
        {
            // Leaky ReLU forward: negative inputs are scaled by the configured
            // slope instead of being clamped to zero.
            // http://en.wikipedia.org/wiki/Rectifier_(neural_networks)#Leaky_ReLUs
            var config = new ReluLayerConfiguration(0.01f);
            var layer = new ReluLayer(config);

            layer.Setup(bottom, top);
            layer.Forward(bottom, top);

            double slope = layer.Parameters.NegativeSlope;

            Assert.Equal(bottom.Count, top.Count);

            using (var topCpu = top.OnCpu())
            using (var bottomCpu = bottom.OnCpu())
            {
                for (int i = 0; i < bottom.Count; i++)
                {
                    double input = bottomCpu.DataAt(i);
                    double output = topCpu.DataAt(i);

                    if (input <= 0)
                    {
                        // Negative side: output tracks input * slope (small float tolerance).
                        Assert.True(output >= input * slope - 0.000001);
                    }
                    else
                    {
                        // Positive side: output is the input passed through unchanged.
                        Assert.True(output >= 0.0d);
                        Assert.True(output == 0.0d || output == input);
                    }
                }
            }
        }
コード例 #2
0
        public void SerializationTest()
        {
            // Round-trips a ReluLayer through binary serialization and verifies
            // that all input/output dimensions survive the trip.
            // NOTE(review): BinaryFormatter is obsolete (security risk, removed in
            // .NET 9); consider migrating this test to a supported serializer.

            // Create a ReluLayer
            var layer = new ReluLayer();

            layer.Init(10, 10, 3);

            using (var ms = new MemoryStream())
            {
                // Serialize
                IFormatter formatter = new BinaryFormatter();
                formatter.Serialize(ms, layer);

                // Deserialize (rewind the stream first)
                ms.Position = 0;
                var result = formatter.Deserialize(ms) as ReluLayer;

                // Dimensions must match the original layer exactly.
                Assert.AreEqual(layer.InputDepth, result.InputDepth);
                Assert.AreEqual(layer.InputHeight, result.InputHeight);
                Assert.AreEqual(layer.InputWidth, result.InputWidth);
                Assert.AreEqual(layer.OutputDepth, result.OutputDepth);
                Assert.AreEqual(layer.OutputHeight, result.OutputHeight);
                Assert.AreEqual(layer.OutputWidth, result.OutputWidth);
            }
        }
コード例 #3
0
        public static ReluLayer<T> Relu<T>(this LayerBase<T> layer) where T : struct, IEquatable<T>, IFormattable
        {
            // Fluent helper: append a ReLU activation after the given layer.
            var relu = new ReluLayer<T>();

            relu.AcceptParent(layer);
            return relu;
        }
コード例 #4
0
        public void ReluLayer_BackwardGradient()
        {
            // Numerically verifies the ReLU backward pass against finite differences.
            var layer = new ReluLayer();
            var gradientChecker = new GradientChecker(1e-2f, 1e-3f, 1701, 0.0d, 0.01f);

            gradientChecker.CheckEltwise(layer, bottom, top);
        }
コード例 #5
0
        public void ReluLayer_ForwardGradientWithLeakyUnits()
        {
            // Forward pass under a leaky configuration (slope 0.01): negative
            // activations must follow input * slope rather than being zeroed.
            // http://en.wikipedia.org/wiki/Rectifier_(neural_networks)#Leaky_ReLUs
            var configuration = new ReluLayerConfiguration(0.01f);
            var layer = new ReluLayer(configuration);

            layer.Setup(bottom, top);
            layer.Forward(bottom, top);

            double slope = layer.Parameters.NegativeSlope;

            Assert.Equal(bottom.Count, top.Count);

            using (var topCpu = top.OnCpu())
            using (var bottomCpu = bottom.OnCpu())
            {
                int total = bottom.Count;
                for (int index = 0; index < total; index++)
                {
                    double x = bottomCpu.DataAt(index);
                    double y = topCpu.DataAt(index);

                    if (x <= 0)
                    {
                        // Leaky branch, with a small tolerance for float error.
                        Assert.True(y >= x * slope - 0.000001);
                    }
                    else
                    {
                        Assert.True(y >= 0.0d);
                        Assert.True(y == 0.0d || y == x);
                    }
                }
            }
        }
コード例 #6
0
        public void ComputeTwiceGradientShouldYieldTheSameResult()
        {
            // Calling Backward twice with the same output gradient must leave the
            // stored input gradients identical (Backward is idempotent).
            const int inputWidth = 20;
            const int inputHeight = 20;
            const int inputDepth = 2;

            var layer = new ReluLayer();
            layer.Init(inputWidth, inputHeight, inputDepth);

            // Forward pass over a random input volume.
            var input = BuilderInstance.Volume.Random(new Shape(inputWidth, inputHeight, inputDepth));
            var output = layer.DoForward(input, true);

            // All-ones output gradient.
            var outputGradient = BuilderInstance.Volume.SameAs(new double[output.Shape.TotalLength].Populate(1.0), output.Shape);

            // First backward pass.
            layer.Backward(outputGradient);
            var firstRun = ((Volume.Double.Volume)layer.InputActivationGradients.Clone()).ToArray();

            // Second backward pass with identical gradients.
            layer.Backward(outputGradient);
            var secondRun = ((Volume.Double.Volume)layer.InputActivationGradients.Clone()).ToArray();

            Assert.IsTrue(firstRun.SequenceEqual(secondRun));
        }
コード例 #7
0
        public void ReluLayer_BackwardGradientWithLeakyUnits()
        {
            // Gradient check for the leaky variant (negative slope 0.01).
            var config = new ReluLayerConfiguration(0.01f);
            var layer = new ReluLayer(config);
            var checker = new GradientChecker(1e-2f, 1e-2f, 1701, 0.0d, 0.01f);

            checker.CheckEltwise(layer, bottom, top);
        }
コード例 #8
0
        public static ReluLayer<T> Relu<T>(this LayerBase<T> layer) where T : struct, IEquatable<T>, IFormattable
        {
            // Fluent helper: wire a new ReLU layer as the child of the given layer.
            var relu = new ReluLayer<T>();
            layer.ConnectTo(relu);

            return relu;
        }
コード例 #9
0
        public static ReluLayer Relu(this LayerBase layer)
        {
            // Fluent helper: connect a new ReLU activation after the given layer.
            var relu = new ReluLayer();
            layer.ConnectTo(relu);

            return relu;
        }
コード例 #10
0
        public void ReluLayer_BackwardGradient()
        {
            // Finite-difference gradient check for the default ReLU backward pass.
            var layer = new ReluLayer();

            var checker = new GradientChecker(1e-2f, 1e-3f, 1701, 0.0d, 0.01f);
            checker.CheckEltwise(layer, bottom, top);
        }
コード例 #11
0
ファイル: rldemoWindow.cs プロジェクト: play3577/ConvNetCS
        private static Network CreateNewNetwork()
        {
            // Builds the RL demo network:
            // input -> conv(16) -> relu -> maxpool -> fc(50) -> relu -> fc(50) -> relu -> fc(5) -> regression.
            var num_inputs      = 27; // 9 eyes, each sees 3 numbers (wall, green, red thing proximity)
            var num_actions     = 5;  // 5 possible angles agent can turn
            var temporal_window = 1;  // amount of temporal memory. 0 = agent lives in-the-moment :)
            var network_size    = num_inputs * temporal_window + num_actions * temporal_window + num_inputs;

            Network net = new Network();

            // 1x1xN input layer holding the flattened state vector.
            InputLayer il = new InputLayer();

            il.OutputWidth  = 1;
            il.OutputHeight = 1;
            il.OutputDepth  = network_size;
            net.Layers.Add(il);


            // Convolution: 16 filters of 5x5 over the input volume.
            ConvLayer conv = new ConvLayer(16, 5, 5, il.OutputDepth, il.OutputWidth, il.OutputHeight, 1, 2, 0, 1, 0.1);

            net.Layers.Add(conv);

            ReluLayer rlv = new ReluLayer(conv.OutputDepth, conv.OutputWidth, conv.OutputHeight);

            net.Layers.Add(rlv);

            // 2x2 max pooling, stride 2.
            MaxPoolLayer pl = new MaxPoolLayer(2, 2, rlv.OutputDepth, rlv.OutputWidth, rlv.OutputHeight, 2, 0, 0);

            net.Layers.Add(pl);

            // Two fully-connected 50-unit layers, each followed by ReLU.
            FullyConnLayer fc = new FullyConnLayer(50, pl.OutputDepth, pl.OutputWidth, pl.OutputHeight, 0, 1, 0);

            net.Layers.Add(fc);

            ReluLayer rl = new ReluLayer(fc.OutputDepth, fc.OutputWidth, fc.OutputHeight);

            net.Layers.Add(rl);



            FullyConnLayer fc2 = new FullyConnLayer(50, rl.OutputDepth, rl.OutputWidth, rl.OutputHeight, 0, 1, 0);

            net.Layers.Add(fc2);

            ReluLayer rl2 = new ReluLayer(fc2.OutputDepth, fc2.OutputWidth, fc2.OutputHeight);

            net.Layers.Add(rl2);



            // Output head: one Q-value per action (5), trained by regression.
            FullyConnLayer fc8 = new FullyConnLayer(5, rl2.OutputDepth, rl2.OutputWidth, rl2.OutputHeight, 0, 1, 0);

            net.Layers.Add(fc8);

            RegressionLayer sl = new RegressionLayer(fc8.OutputDepth, fc8.OutputWidth, fc8.OutputHeight);

            net.LossLayer = sl;
            return(net);
        }
コード例 #12
0
ファイル: Program.cs プロジェクト: play3577/ConvNetCS
        private static Network CreateNewNetwork()
        {
            // Builds a small CIFAR-style classifier over 32x32x3 images:
            // [conv16 -> relu -> pool] -> [conv20 -> relu -> pool] x2 -> fc(10) -> softmax.
            Network net = new Network();

            InputLayer il = new InputLayer();

            il.OutputWidth  = 32;
            il.OutputHeight = 32;
            il.OutputDepth  = 3;
            net.Layers.Add(il);

            // Stage 1: 16 filters of 5x5, ReLU, 2x2 max-pool.
            ConvLayer conv = new ConvLayer(16, 5, 5, 3, 32, 32, 1, 2, 0, 1, 0.1);

            net.Layers.Add(conv);

            ReluLayer rl = new ReluLayer(conv.OutputDepth, conv.OutputWidth, conv.OutputHeight);

            net.Layers.Add(rl);

            MaxPoolLayer pl = new MaxPoolLayer(2, 2, rl.OutputDepth, rl.OutputWidth, rl.OutputHeight, 2, 0, 0);

            net.Layers.Add(pl);


            // Stage 2: 20 filters of 5x5, ReLU, 2x2 max-pool.
            ConvLayer conv2 = new ConvLayer(20, 5, 5, pl.OutputDepth, pl.OutputWidth, pl.OutputHeight, 1, 2, 0, 1, 0.1);

            net.Layers.Add(conv2);

            ReluLayer rl2 = new ReluLayer(conv2.OutputDepth, conv2.OutputWidth, conv2.OutputHeight);

            net.Layers.Add(rl2);

            MaxPoolLayer pl2 = new MaxPoolLayer(2, 2, rl2.OutputDepth, rl2.OutputWidth, rl2.OutputHeight, 2, 0, 0);

            net.Layers.Add(pl2);


            // Stage 3: 20 filters of 5x5, ReLU, 2x2 max-pool.
            ConvLayer conv3 = new ConvLayer(20, 5, 5, pl2.OutputDepth, pl2.OutputWidth, pl2.OutputHeight, 1, 2, 0, 1, 0.1);

            net.Layers.Add(conv3);

            ReluLayer rl3 = new ReluLayer(conv3.OutputDepth, conv3.OutputWidth, conv3.OutputHeight);

            net.Layers.Add(rl3);

            MaxPoolLayer pl3 = new MaxPoolLayer(2, 2, rl3.OutputDepth, rl3.OutputWidth, rl3.OutputHeight, 2, 0, 0);

            net.Layers.Add(pl3);

            // Classifier head: 10-way fully connected + softmax loss.
            FullyConnLayer fc = new FullyConnLayer(10, pl3.OutputDepth, pl3.OutputWidth, pl3.OutputHeight, 0, 1, 0);

            net.Layers.Add(fc);

            SoftmaxLayer sl = new SoftmaxLayer(fc.OutputDepth, fc.OutputWidth, fc.OutputHeight);

            net.LossLayer = sl;
            return(net);
        }
コード例 #13
0
        public void ReluLayer_BackwardGradientWithLeakyUnits()
        {
            // Finite-difference gradient check for the leaky ReLU (slope 0.01).
            var configuration = new ReluLayerConfiguration(0.01f);
            var layer = new ReluLayer(configuration);
            var gradientChecker = new GradientChecker(1e-2f, 1e-2f, 1701, 0.0d, 0.01f);

            gradientChecker.CheckEltwise(layer, bottom, top);
        }
コード例 #14
0
        public static ReluLayer<T> Relu<T>(this ConvLayer<T> layer) where T : struct, IEquatable<T>, IFormattable
        {
            // Appends a ReLU after a convolution and nudges the conv bias to 0.1,
            // which helps keep ReLU units active early in training.
            var relu = new ReluLayer<T>();

            relu.AcceptParent(layer);
            layer.BiasPref = (T)Convert.ChangeType(0.1, typeof(T)); // can we do better?

            return relu;
        }
コード例 #15
0
        public static ReluLayer<T> Relu<T>(this ConvLayer<T> layer) where T : struct, IEquatable<T>, IFormattable
        {
            // Connects a ReLU after the convolution, sets a small positive bias
            // preference (keeps units active early), and refreshes output sizing.
            var relu = new ReluLayer<T>();
            layer.ConnectTo(relu);

            layer.BiasPref = (T)Convert.ChangeType(0.1, typeof(T));  // can we do better?
            layer.UpdateOutputSize();

            return relu;
        }
コード例 #16
0
        public void GradientWrtInputCheck()
        {
            // Numerical gradient check of the ReLU w.r.t. its input on a 20x20x2 volume.
            const int width = 20;
            const int height = 20;
            const int depth = 2;

            var layer = new ReluLayer();

            GradientCheckTools.GradientCheck(layer, width, height, depth, 1e-6);
        }
コード例 #17
0
        public void ReluLayer_Setup()
        {
            // Setup must shape the top blob exactly like the bottom blob.
            var layer = new ReluLayer();

            layer.Setup(bottom, top);

            Assert.Equal(bottom.Num, top.Num);
            Assert.Equal(bottom.Channels, top.Channels);
            Assert.Equal(bottom.Height, top.Height);
            Assert.Equal(bottom.Width, top.Width);
        }
コード例 #18
0
        public static ReluLayer Relu(this ConvLayer layer)
        {
            // Connects a ReLU after the convolution, biases the conv toward 0.1
            // (keeps units active early), and recomputes the conv's output size.
            var relu = new ReluLayer();
            layer.ConnectTo(relu);

            layer.BiasPref = 0.1;
            layer.UpdateOutputSize();

            return relu;
        }
コード例 #19
0
        public void GradientWrtInputCheck()
        {
            // Verifies the analytic input gradient against a numeric estimate
            // on a 20x20x2 input, within a 1e-6 tolerance.
            const int inputWidth  = 20;
            const int inputHeight = 20;
            const int inputDepth  = 2;

            var reluLayer = new ReluLayer();

            GradientCheckTools.GradientCheck(reluLayer, inputWidth, inputHeight, inputDepth, 1e-6);
        }
コード例 #20
0
        public void Instantiation()
        {
            // ReLU is shape-preserving: output dimensions equal input dimensions.
            const int inputWidth  = 20;
            const int inputHeight = 20;
            const int inputDepth  = 2;

            var layer = new ReluLayer();
            layer.Init(inputWidth, inputHeight, inputDepth);

            Assert.AreEqual(inputWidth, layer.OutputWidth);
            Assert.AreEqual(inputHeight, layer.OutputHeight);
            Assert.AreEqual(inputDepth, layer.OutputDepth);
        }
コード例 #21
0
        public void ReluLayer_Forward()
        {
            // Standard ReLU forward: every output is non-negative, and each one
            // is either zero or equal to its corresponding input.
            var layer = new ReluLayer();

            layer.Setup(bottom, top);
            layer.Forward(bottom, top);

            Assert.Equal(bottom.Count, top.Count);

            using (var topCpu = top.OnCpu())
            using (var bottomCpu = bottom.OnCpu())
            {
                for (int i = 0; i < bottom.Count; i++)
                {
                    double actual = topCpu.DataAt(i);

                    Assert.True(actual >= 0.0d);
                    Assert.True(actual == 0.0d || actual == bottomCpu.DataAt(i));
                }
            }
        }
コード例 #22
0
ファイル: DrawImage.cs プロジェクト: markhsia/ConvNetCS
        private static Network CreateNewNetwork()
        {
            // Small regression MLP: 2 inputs -> fc(50) -> relu -> fc(50) -> relu
            // -> fc(3) -> regression loss.
            Network net = new Network();

            // 1x1x2 input layer for the two scalar features.
            InputLayer il = new InputLayer();

            il.OutputWidth  = 1;
            il.OutputHeight = 1;
            il.OutputDepth  = 2;
            net.Layers.Add(il);



            // Hidden layer 1: 50 units + ReLU.
            FullyConnLayer fc = new FullyConnLayer(50, il.OutputDepth, il.OutputWidth, il.OutputHeight, 0, 1, 0);

            net.Layers.Add(fc);

            ReluLayer rl = new ReluLayer(fc.OutputDepth, fc.OutputWidth, fc.OutputHeight);

            net.Layers.Add(rl);



            // Hidden layer 2: 50 units + ReLU.
            FullyConnLayer fc2 = new FullyConnLayer(50, rl.OutputDepth, rl.OutputWidth, rl.OutputHeight, 0, 1, 0);

            net.Layers.Add(fc2);

            ReluLayer rl2 = new ReluLayer(fc2.OutputDepth, fc2.OutputWidth, fc2.OutputHeight);

            net.Layers.Add(rl2);



            // Output head: 3 regression targets.
            FullyConnLayer fc8 = new FullyConnLayer(3, rl2.OutputDepth, rl2.OutputWidth, rl2.OutputHeight, 0, 1, 0);

            net.Layers.Add(fc8);

            RegressionLayer sl = new RegressionLayer(fc8.OutputDepth, fc8.OutputWidth, fc8.OutputHeight);

            net.LossLayer = sl;
            return(net);
        }
コード例 #23
0
        public void ReluLayerSerialization()
        {
            // Serialises a ReluLayer to its data dictionary and back, verifying
            // that both input and output dimensions are preserved.
            var layer = new ReluLayer();
            layer.Init(28, 24, 1);

            var data = layer.GetData();

            Assert.AreEqual(28, data["InputWidth"]);
            Assert.AreEqual(24, data["InputHeight"]);
            Assert.AreEqual(1, data["InputDepth"]);

            var deserialized = LayerBase<double>.FromData(data) as ReluLayer;

            Assert.IsNotNull(deserialized);
            Assert.AreEqual(28, deserialized.InputWidth);
            Assert.AreEqual(24, deserialized.InputHeight);
            Assert.AreEqual(1, deserialized.InputDepth);
            Assert.AreEqual(layer.OutputWidth, deserialized.OutputWidth);
            Assert.AreEqual(layer.OutputHeight, deserialized.OutputHeight);
            Assert.AreEqual(layer.OutputDepth, deserialized.OutputDepth);
        }
コード例 #24
0
        public void ReluLayer_Forward()
        {
            // ReLU forward invariant: outputs are non-negative; each output is
            // either zero or identical to its input.
            var reluLayer = new ReluLayer();
            reluLayer.Setup(bottom, top);
            reluLayer.Forward(bottom, top);

            Assert.Equal(bottom.Count, top.Count);

            using (var topCpu = top.OnCpu())
            using (var bottomCpu = bottom.OnCpu())
            {
                int total = bottom.Count;
                for (int index = 0; index < total; index++)
                {
                    double y = topCpu.DataAt(index);

                    Assert.True(y >= 0.0d);
                    Assert.True(y == 0.0d || y == bottomCpu.DataAt(index));
                }
            }
        }
コード例 #25
0
        public static NeuralNetwork loadNetwork(String file_name)
        {
            // Loads a serialized network from models\<file_name>.network.
            // File layout: sections separated by '#'. Section 0 lists the layers
            // ("<type> <nodes>" entries separated by ';'); each subsequent section
            // holds one weight matrix (rows separated by ';', values by ' ').
            String text;

            String[] sections;

            Layer[] layers;

            using (StreamReader streamReader = new StreamReader("models\\" + file_name + ".network", Encoding.UTF8))
            {
                text     = streamReader.ReadToEnd();
                sections = text.Split("#".ToCharArray());
            }

            // One layer per ';'-separated descriptor in the first section.
            String[] values = sections[0].Split(";".ToCharArray());
            layers = new Layer[values.Length];

            for (int i = 0; i < sections.Length; i++)
            {
                if (i == 0)
                {
                    // Construct the layer objects from their "<type> <nodes>" descriptors.
                    // Recognised types: "relu", "do" (dropout), "lrelu" (leaky ReLU).
                    for (int j = 0; j < layers.Length; j++)
                    {
                        String[] curText = values[j].Split(" ".ToCharArray());

                        if (curText[0] == "relu")
                        {
                            int layer_nodes = int.Parse(curText[1]);

                            layers[j] = new ReluLayer(layer_nodes);
                        }
                        else if (curText[0] == "do")
                        {
                            int layer_nodes = int.Parse(curText[1]);

                            layers[j] = new DropoutLayer(layer_nodes);
                        }
                        else if (curText[0] == "lrelu")
                        {
                            int layer_nodes = int.Parse(curText[1]);

                            layers[j] = new LeakyReluLayer(layer_nodes);
                        }
                        // The last layer gets a 1x1 placeholder matrix so the
                        // weight-loading pass below (which keys on weights == null)
                        // skips it.
                        if (j == layers.Length - 1)
                        {
                            layers[j].weights = new Matrix(1, 1);
                        }
                    }
                }
                else if (layers[i - 1].weights == null)
                {
                    // Section i carries the weight matrix of layer i-1.
                    int index = i - 1;
                    values = sections[i].Split(";".ToCharArray());

                    int rows = values.Length;
                    int cols = values[0].Split(" ".ToCharArray()).Length;

                    Matrix m = new Matrix(rows, cols);

                    for (int j = 0; j < values.Length; j++)
                    {
                        String[] comp = values[j].Split(" ".ToCharArray());

                        for (int k = 0; k < comp.Length; k++)
                        {
                            // NOTE(review): float.Parse uses the current culture; a model
                            // saved under a ','-decimal locale would fail to round-trip.
                            // Consider CultureInfo.InvariantCulture — confirm against the
                            // matching save routine first.
                            m.data[j, k] = float.Parse(comp[k]);
                        }
                    }

                    // Dumps the matrix — presumably a debug aid; verify it is intended here.
                    Matrix.table(m);

                    layers[index].weights = m;
                }
            }

            NeuralNetwork nn = new NeuralNetwork(layers);

            return(nn);
        }
コード例 #26
0
        static void createNeuralNetwork(String[] words)
        {
            // Builds the global network `nn` from a tokenized command line.
            // Two accepted forms:
            //   "create 4 8 2"            -> plain layer sizes (all defaults)
            //   "create relu 8 do 4 ..."  -> (type, size) pairs; types: relu, do, lrelu
            // Any parse/construction failure is reported via printError and aborts.
            Random r = Lib.NeuralNetwork.random;

            try
            {
                if (Char.IsDigit(words[1].ToCharArray()[0]))
                {
                    // Numeric form: every remaining token is a layer size.
                    int[] nodes = new int[words.Length - 1];
                    for (int i = 0; i < nodes.Length; i++)
                    {
                        nodes[i] = int.Parse(words[i + 1]);
                    }
                    nn = new Lib.NeuralNetwork(r, nodes);
                }
                else
                {
                    // Typed form: tokens come in (layerType, nodeCount) pairs.
                    Layer[] layers = new Layer[(words.Length - 1) / 2];

                    for (int i = 1; i < words.Length; i += 2)
                    {
                        if (words[i].Equals("relu"))
                        {
                            int nodes = int.Parse(words[i + 1]);
                            layers[(i - 1) / 2] = new ReluLayer(nodes);
                        }
                        else if (words[i].Equals("do"))
                        {
                            int nodes = int.Parse(words[i + 1]);
                            layers[(i - 1) / 2] = new DropoutLayer(nodes);
                        }
                        else if (words[i].Equals("lrelu"))
                        {
                            int nodes = int.Parse(words[i + 1]);
                            layers[(i - 1) / 2] = new Lib.Layers.LeakyReluLayer(nodes);
                        }
                        else
                        {
                            Console.WriteLine("There is no layer type like \"" + words[i] + "\"");
                            return;
                        }
                    }

                    // Randomly initialise weights (layer i -> i+1; the last layer has
                    // no outgoing weights) and per-layer biases.
                    for (int i = 0; i < layers.Length; i++)
                    {
                        if (i != layers.Length - 1)
                        {
                            layers[i].weights = new Matrix(layers[i + 1].nodes, layers[i].nodes);
                            layers[i].weights.randomize(r);
                        }

                        layers[i].bias = new Matrix(layers[i].nodes, 1);
                        layers[i].bias.randomize(r);
                    }

                    nn = new Lib.NeuralNetwork(Lib.NeuralNetwork.random, layers);
                }
            }
            catch (Exception e)
            {
                printError(e);
                return;
            }
            // Fixed typo in the user-facing message: "succesfully" -> "successfully".
            Console.WriteLine("Neural Network was created successfully!");
        }
コード例 #27
0
        public static Network CreateVGG16Network(int imageWidth, int imageHeight, int LabelsCount)
        {
            // Builds a VGG-16 style classifier: five conv blocks (64, 128, 256,
            // 512, 512 filters; each 3x3 conv followed by ReLU, each block ending
            // in 2x2/stride-2 max-pool), then fc(4096) -> relu -> dropout twice,
            // and a LabelsCount-way fully-connected + softmax head.
            Network net = new Network();

            // Input: imageWidth x imageHeight RGB.
            InputLayer il = new InputLayer();

            il.OutputWidth  = imageWidth;
            il.OutputHeight = imageHeight;
            il.OutputDepth  = 3;
            net.Layers.Add(il);

            // Block 1: two 64-filter 3x3 convs + pool.
            ConvLayer conv1_1 = new ConvLayer(64, 3, 3, 3, imageWidth, imageHeight, 1, 1, 0, 1, 0.1f);

            net.Layers.Add(conv1_1);

            ReluLayer rl1 = new ReluLayer(conv1_1.OutputDepth, conv1_1.OutputWidth, conv1_1.OutputHeight);

            net.Layers.Add(rl1);


            ConvLayer conv1_2 = new ConvLayer(64, 3, 3, rl1.OutputDepth, rl1.OutputWidth, rl1.OutputHeight, 1, 1, 0, 1, 0.1f);

            net.Layers.Add(conv1_2);

            ReluLayer rl2 = new ReluLayer(conv1_2.OutputDepth, conv1_2.OutputWidth, conv1_2.OutputHeight);

            net.Layers.Add(rl2);

            MaxPoolLayer pl1 = new MaxPoolLayer(2, 2, rl2.OutputDepth, rl2.OutputWidth, rl2.OutputHeight, 2, 0);

            net.Layers.Add(pl1);


            // Block 2: two 128-filter convs + pool.
            ConvLayer conv2_1 = new ConvLayer(128, 3, 3, pl1.OutputDepth, pl1.OutputWidth, pl1.OutputHeight, 1, 1, 0, 1, 0.1f);

            net.Layers.Add(conv2_1);

            ReluLayer rl3 = new ReluLayer(conv2_1.OutputDepth, conv2_1.OutputWidth, conv2_1.OutputHeight);

            net.Layers.Add(rl3);

            ConvLayer conv2_2 = new ConvLayer(128, 3, 3, rl3.OutputDepth, rl3.OutputWidth, rl3.OutputHeight, 1, 1, 0, 1, 0.1f);

            net.Layers.Add(conv2_2);

            ReluLayer rl4 = new ReluLayer(conv2_2.OutputDepth, conv2_2.OutputWidth, conv2_2.OutputHeight);

            net.Layers.Add(rl4);

            MaxPoolLayer pl2 = new MaxPoolLayer(2, 2, rl4.OutputDepth, rl4.OutputWidth, rl4.OutputHeight, 2, 0);

            net.Layers.Add(pl2);

            // Block 3: three 256-filter convs + pool.
            ConvLayer conv3_1 = new ConvLayer(256, 3, 3, pl2.OutputDepth, pl2.OutputWidth, pl2.OutputHeight, 1, 1, 0, 1, 0.1f);

            net.Layers.Add(conv3_1);

            ReluLayer rl5 = new ReluLayer(conv3_1.OutputDepth, conv3_1.OutputWidth, conv3_1.OutputHeight);

            net.Layers.Add(rl5);

            ConvLayer conv3_2 = new ConvLayer(256, 3, 3, rl5.OutputDepth, rl5.OutputWidth, rl5.OutputHeight, 1, 1, 0, 1, 0.1f);

            net.Layers.Add(conv3_2);

            ReluLayer rl6 = new ReluLayer(conv3_2.OutputDepth, conv3_2.OutputWidth, conv3_2.OutputHeight);

            net.Layers.Add(rl6);


            ConvLayer conv3_3 = new ConvLayer(256, 3, 3, rl6.OutputDepth, rl6.OutputWidth, rl6.OutputHeight, 1, 1, 0, 1, 0.1f);

            net.Layers.Add(conv3_3);

            ReluLayer rl7 = new ReluLayer(conv3_3.OutputDepth, conv3_3.OutputWidth, conv3_3.OutputHeight);

            net.Layers.Add(rl7);

            MaxPoolLayer pl3 = new MaxPoolLayer(2, 2, rl7.OutputDepth, rl7.OutputWidth, rl7.OutputHeight, 2, 0);

            net.Layers.Add(pl3);

            // Block 4: three 512-filter convs + pool.
            ConvLayer conv4_1 = new ConvLayer(512, 3, 3, pl3.OutputDepth, pl3.OutputWidth, pl3.OutputHeight, 1, 1, 0, 1, 0.1f);

            net.Layers.Add(conv4_1);

            ReluLayer rl8 = new ReluLayer(conv4_1.OutputDepth, conv4_1.OutputWidth, conv4_1.OutputHeight);

            net.Layers.Add(rl8);

            ConvLayer conv4_2 = new ConvLayer(512, 3, 3, rl8.OutputDepth, rl8.OutputWidth, rl8.OutputHeight, 1, 1, 0, 1, 0.1f);

            net.Layers.Add(conv4_2);

            ReluLayer rl9 = new ReluLayer(conv4_2.OutputDepth, conv4_2.OutputWidth, conv4_2.OutputHeight);

            net.Layers.Add(rl9);


            ConvLayer conv4_3 = new ConvLayer(512, 3, 3, rl9.OutputDepth, rl9.OutputWidth, rl9.OutputHeight, 1, 1, 0, 1, 0.1f);

            net.Layers.Add(conv4_3);

            ReluLayer rl10 = new ReluLayer(conv4_3.OutputDepth, conv4_3.OutputWidth, conv4_3.OutputHeight);

            net.Layers.Add(rl10);

            MaxPoolLayer pl4 = new MaxPoolLayer(2, 2, rl10.OutputDepth, rl10.OutputWidth, rl10.OutputHeight, 2, 0);

            net.Layers.Add(pl4);

            // Block 5: three 512-filter convs + pool.
            ConvLayer conv5_1 = new ConvLayer(512, 3, 3, pl4.OutputDepth, pl4.OutputWidth, pl4.OutputHeight, 1, 1, 0, 1, 0.1f);

            net.Layers.Add(conv5_1);

            ReluLayer rl11 = new ReluLayer(conv5_1.OutputDepth, conv5_1.OutputWidth, conv5_1.OutputHeight);

            net.Layers.Add(rl11);

            ConvLayer conv5_2 = new ConvLayer(512, 3, 3, rl11.OutputDepth, rl11.OutputWidth, rl11.OutputHeight, 1, 1, 0, 1, 0.1f);

            net.Layers.Add(conv5_2);

            ReluLayer rl12 = new ReluLayer(conv5_2.OutputDepth, conv5_2.OutputWidth, conv5_2.OutputHeight);

            net.Layers.Add(rl12);


            ConvLayer conv5_3 = new ConvLayer(512, 3, 3, rl12.OutputDepth, rl12.OutputWidth, rl12.OutputHeight, 1, 1, 0, 1, 0.1f);

            net.Layers.Add(conv5_3);

            ReluLayer rl13 = new ReluLayer(conv5_3.OutputDepth, conv5_3.OutputWidth, conv5_3.OutputHeight);

            net.Layers.Add(rl13);

            MaxPoolLayer pl5 = new MaxPoolLayer(2, 2, rl13.OutputDepth, rl13.OutputWidth, rl13.OutputHeight, 2, 0);

            net.Layers.Add(pl5);

            // Classifier: fc(4096) -> relu -> dropout(0.5), twice.
            FullyConnLayer fc = new FullyConnLayer(4096, pl5.OutputDepth,
                                                   pl5.OutputWidth, pl5.OutputHeight, 0, 1, 0);

            net.Layers.Add(fc);

            ReluLayer rl14 = new ReluLayer(fc.OutputDepth, fc.OutputWidth, fc.OutputHeight);

            net.Layers.Add(rl14);
            DropoutLayer d = new DropoutLayer(rl14.OutputDepth, rl14.OutputWidth, rl14.OutputHeight, 0.5f);

            net.Layers.Add(d);

            FullyConnLayer fc2 = new FullyConnLayer(4096, d.OutputDepth, d.OutputWidth, d.OutputHeight, 0, 1, 0);

            net.Layers.Add(fc2);

            ReluLayer rl15 = new ReluLayer(fc2.OutputDepth, fc2.OutputWidth, fc2.OutputHeight);

            net.Layers.Add(rl15);
            DropoutLayer d2 = new DropoutLayer(rl15.OutputDepth, rl15.OutputWidth, rl15.OutputHeight, 0.5f);

            net.Layers.Add(d2);


            // Output head: LabelsCount-way fully connected + softmax loss.
            FullyConnLayer fc3 = new FullyConnLayer(LabelsCount, d2.OutputDepth, d2.OutputWidth, d2.OutputHeight, 0, 1, 0);

            net.Layers.Add(fc3);

            SoftmaxLayer sl = new SoftmaxLayer(fc3.OutputDepth, fc3.OutputWidth, fc3.OutputHeight);

            net.LossLayer = sl;

            return(net);
        }
コード例 #28
0
        public void ReluLayer_Setup()
        {
            // After Setup, the top blob must mirror the bottom blob's shape.
            var reluLayer = new ReluLayer();

            reluLayer.Setup(bottom, top);

            Assert.Equal(bottom.Num, top.Num);
            Assert.Equal(bottom.Channels, top.Channels);
            Assert.Equal(bottom.Height, top.Height);
            Assert.Equal(bottom.Width, top.Width);
        }