Exemple #1
0
        public void SerializationTest()
        {
            // Round-trip a RegressionLayer through binary serialization and verify
            // that all input/output dimensions survive the trip.
            // NOTE(review): BinaryFormatter is obsolete and removed in .NET 9;
            // if this project targets modern .NET, this test (and the layer's
            // serialization strategy) should migrate to a supported serializer.
            var layer = new RegressionLayer();

            layer.Init(10, 10, 3);

            RegressionLayer deserialized;

            using (var ms = new MemoryStream())
            {
                // Serialize
                IFormatter formatter = new BinaryFormatter();
                formatter.Serialize(ms, layer);

                // Deserialize: rewind the stream before reading it back.
                ms.Position  = 0;
                deserialized = formatter.Deserialize(ms) as RegressionLayer;
            }

            // Fail with a clear message instead of a NullReferenceException
            // if the payload deserialized to the wrong type (the `as` cast
            // yields null in that case).
            Assert.IsNotNull(deserialized);

            Assert.AreEqual(layer.InputDepth, deserialized.InputDepth);
            Assert.AreEqual(layer.InputHeight, deserialized.InputHeight);
            Assert.AreEqual(layer.InputWidth, deserialized.InputWidth);
            Assert.AreEqual(layer.OutputDepth, deserialized.OutputDepth);
            Assert.AreEqual(layer.OutputHeight, deserialized.OutputHeight);
            Assert.AreEqual(layer.OutputWidth, deserialized.OutputWidth);
        }
Exemple #2
0
        /// <summary>
        /// Fluent helper: appends a <see cref="RegressionLayer"/> after
        /// <paramref name="layer"/> and returns the newly created layer.
        /// </summary>
        public static RegressionLayer Regression(this LayerBase layer)
        {
            var regressionLayer = new RegressionLayer();
            layer.ConnectTo(regressionLayer);
            return regressionLayer;
        }
Exemple #3
0
        //public static SvmLayer<T> Svm<T>(this LayerBase<T> layer, int classCount) where T : struct, IEquatable<T>, IFormattable
        //{
        //    var svm = new SvmLayer<T>(classCount);
        //    layer.ConnectTo(svm);

        //    return svm;
        //}

        /// <summary>
        /// Fluent helper: appends a <see cref="RegressionLayer{T}"/> after
        /// <paramref name="layer"/> and returns the newly created layer.
        /// </summary>
        public static RegressionLayer<T> Regression<T>(this LayerBase<T> layer) where T : struct, IEquatable<T>, IFormattable
        {
            var regressionLayer = new RegressionLayer<T>();
            layer.ConnectTo(regressionLayer);
            return regressionLayer;
        }
Exemple #4
0
        /// <summary>
        /// Builds the agent's brain network:
        /// input -> conv -> relu -> pool -> fc(50) -> relu -> fc(50) -> relu -> fc(5) -> regression.
        /// </summary>
        private static Network CreateNewNetwork()
        {
            const int numInputs      = 27; // 9 eyes, each sees 3 numbers (wall, green, red thing proximity)
            const int numActions     = 5;  // 5 possible angles agent can turn
            const int temporalWindow = 1;  // amount of temporal memory. 0 = agent lives in-the-moment :)
            var networkSize = numInputs * temporalWindow + numActions * temporalWindow + numInputs;

            var net = new Network();

            // Flattened 1x1xN input: state + temporal memory packed into depth.
            var input = new InputLayer
            {
                OutputWidth  = 1,
                OutputHeight = 1,
                OutputDepth  = networkSize
            };
            net.Layers.Add(input);

            var conv = new ConvLayer(16, 5, 5, input.OutputDepth, input.OutputWidth, input.OutputHeight, 1, 2, 0, 1, 0.1);
            net.Layers.Add(conv);

            var reluAfterConv = new ReluLayer(conv.OutputDepth, conv.OutputWidth, conv.OutputHeight);
            net.Layers.Add(reluAfterConv);

            var pool = new MaxPoolLayer(2, 2, reluAfterConv.OutputDepth, reluAfterConv.OutputWidth, reluAfterConv.OutputHeight, 2, 0, 0);
            net.Layers.Add(pool);

            var hidden1 = new FullyConnLayer(50, pool.OutputDepth, pool.OutputWidth, pool.OutputHeight, 0, 1, 0);
            net.Layers.Add(hidden1);

            var reluAfterHidden1 = new ReluLayer(hidden1.OutputDepth, hidden1.OutputWidth, hidden1.OutputHeight);
            net.Layers.Add(reluAfterHidden1);

            var hidden2 = new FullyConnLayer(50, reluAfterHidden1.OutputDepth, reluAfterHidden1.OutputWidth, reluAfterHidden1.OutputHeight, 0, 1, 0);
            net.Layers.Add(hidden2);

            var reluAfterHidden2 = new ReluLayer(hidden2.OutputDepth, hidden2.OutputWidth, hidden2.OutputHeight);
            net.Layers.Add(reluAfterHidden2);

            // One output per possible action, trained via regression loss.
            var output = new FullyConnLayer(5, reluAfterHidden2.OutputDepth, reluAfterHidden2.OutputWidth, reluAfterHidden2.OutputHeight, 0, 1, 0);
            net.Layers.Add(output);

            net.LossLayer = new RegressionLayer(output.OutputDepth, output.OutputWidth, output.OutputHeight);
            return net;
        }
Exemple #5
0
        /// <summary>
        /// Builds a small fully-connected network:
        /// input(1x1x2) -> fc(50) -> relu -> fc(50) -> relu -> fc(3) -> regression.
        /// </summary>
        private static Network CreateNewNetwork()
        {
            var net = new Network();

            // Two scalar inputs packed into the depth dimension.
            var input = new InputLayer
            {
                OutputWidth  = 1,
                OutputHeight = 1,
                OutputDepth  = 2
            };
            net.Layers.Add(input);

            var hidden1 = new FullyConnLayer(50, input.OutputDepth, input.OutputWidth, input.OutputHeight, 0, 1, 0);
            net.Layers.Add(hidden1);

            var reluAfterHidden1 = new ReluLayer(hidden1.OutputDepth, hidden1.OutputWidth, hidden1.OutputHeight);
            net.Layers.Add(reluAfterHidden1);

            var hidden2 = new FullyConnLayer(50, reluAfterHidden1.OutputDepth, reluAfterHidden1.OutputWidth, reluAfterHidden1.OutputHeight, 0, 1, 0);
            net.Layers.Add(hidden2);

            var reluAfterHidden2 = new ReluLayer(hidden2.OutputDepth, hidden2.OutputWidth, hidden2.OutputHeight);
            net.Layers.Add(reluAfterHidden2);

            // Three regression targets on the output.
            var output = new FullyConnLayer(3, reluAfterHidden2.OutputDepth, reluAfterHidden2.OutputWidth, reluAfterHidden2.OutputHeight, 0, 1, 0);
            net.Layers.Add(output);

            net.LossLayer = new RegressionLayer(output.OutputDepth, output.OutputWidth, output.OutputHeight);
            return net;
        }
Exemple #6
0
        public void RegressionLayerSerialization()
        {
            // Round-trip a RegressionLayer through its GetData()/FromData()
            // dictionary representation and verify all dimensions survive.
            var original = new RegressionLayer();
            original.Init(28, 24, 1);

            var data = original.GetData();

            // The exported dictionary must carry the input dimensions.
            Assert.AreEqual(28, data["InputWidth"]);
            Assert.AreEqual(24, data["InputHeight"]);
            Assert.AreEqual(1, data["InputDepth"]);

            var restored = LayerBase<double>.FromData(data) as RegressionLayer;

            Assert.IsNotNull(restored);
            Assert.AreEqual(28, restored.InputWidth);
            Assert.AreEqual(24, restored.InputHeight);
            Assert.AreEqual(1, restored.InputDepth);
            Assert.AreEqual(original.OutputWidth, restored.OutputWidth);
            Assert.AreEqual(original.OutputHeight, restored.OutputHeight);
            Assert.AreEqual(original.OutputDepth, restored.OutputDepth);
        }