Code Example #1
        public void AddingRegressionLayerWithoutPrecedingFullConnLayerShouldThrow()
        {
            var net = new Net();

            net.AddLayer(new InputLayer(10, 10, 3));

            Assert.Throws(typeof(ArgumentException), () => net.AddLayer(new RegressionLayer(1)));
        }
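For contrast, a minimal sketch of the construction this test expects to succeed, assuming the same non-generic test layers: the regression layer is accepted once a FullyConnLayer with a matching neuron count precedes it (compare Code Example #3).

        // Sketch (assumption): the valid counterpart of the test above.
        var net = new Net();
        net.AddLayer(new InputLayer(10, 10, 3));
        net.AddLayer(new FullyConnLayer(1));   // neuron count matches the regression layer
        net.AddLayer(new RegressionLayer(1));  // does not throw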
Code Example #2
        public void BinaryNetSerializerTest()
        {
            var net = new Net();

            net.AddLayer(new InputLayer(5, 5, 3));
            var conv = new ConvLayer(2, 2, 16);

            net.AddLayer(conv);
            var fullycon = new FullyConnLayer(3);

            net.AddLayer(fullycon);
            net.AddLayer(new SoftmaxLayer(3));

            // Serialize (binary)
            using (var ms = new MemoryStream())
            {
                net.SaveBinary(ms);
                ms.Position = 0;

                // Deserialize (binary)
                Net deserialized = SerializationExtensions.LoadBinary(ms) as Net;

                // Make sure deserialized is identical to serialized
                Assert.IsNotNull(deserialized.Layers);
                Assert.AreEqual(net.Layers.Count, deserialized.Layers.Count);
                Assert.IsTrue(deserialized.Layers[0] is InputLayer);

                var deserializedConv = deserialized.Layers[1] as ConvLayer;
                Assert.NotNull(deserializedConv);
                Assert.NotNull(deserializedConv.Filters);
                Assert.AreEqual(16, deserializedConv.Filters.Count);
                for (int i = 0; i < deserializedConv.Filters.Count; i++)
                {
                    for (int k = 0; k < deserializedConv.Filters[i].Length; k++)
                    {
                        Assert.AreEqual(conv.Filters[i].Get(k), deserializedConv.Filters[i].Get(k));
                        Assert.AreEqual(conv.Filters[i].GetGradient(k), deserializedConv.Filters[i].GetGradient(k));
                    }
                }

                var deserializedFullyCon = deserialized.Layers[2] as FullyConnLayer;
                Assert.NotNull(deserializedFullyCon);
                Assert.NotNull(deserializedFullyCon.Filters);
                Assert.AreEqual(3, deserializedFullyCon.Filters.Count);
                for (int i = 0; i < deserializedFullyCon.Filters.Count; i++)
                {
                    for (int k = 0; k < deserializedFullyCon.Filters[i].Length; k++)
                    {
                        Assert.AreEqual(fullycon.Filters[i].Get(k), deserializedFullyCon.Filters[i].Get(k));
                        Assert.AreEqual(fullycon.Filters[i].GetGradient(k), deserializedFullyCon.Filters[i].GetGradient(k));
                    }
                }

                Assert.IsTrue(deserialized.Layers[3] is SoftmaxLayer);
                Assert.AreEqual(3, ((SoftmaxLayer)deserialized.Layers[3]).ClassCount);
            }
        }
Code Example #3
        public void IncorrectNeuronCountWithRegressionLayer()
        {
            var net = new Net();

            net.AddLayer(new InputLayer(10, 10, 3));
            net.AddLayer(new FullyConnLayer(5)); // should be 10

            Assert.Throws(typeof(ArgumentException), () => net.AddLayer(new RegressionLayer(10)));
        }
Code Example #4
        public void AddingClassificationLayerWithoutPrecedingFullConnLayerShouldThrow()
        {
            var net = new Net();

            net.AddLayer(new InputLayer(10, 10, 3));

            Assert.Throws(typeof(ArgumentException), () => net.AddLayer(new SoftmaxLayer(10)));

            Assert.Throws(typeof(ArgumentException), () => net.AddLayer(new SvmLayer(10)));
        }
Code Example #5
        public static Net <float> Build(int width, int height, int batchSize)
        {
            var net = new Net <float>();

            net.AddLayer(new InputLayer(width, height, 1));
            // 0.5*((in - 1) * stride + width - in) = pad
            // pad = 0.5*(width + in * stride - stride - in)
            // With stride = width = 3, pad = in
            net.AddLayer(ConvFromInput(3, width, 1, 64));
            net.AddLayer(new ReluLayer());
            net.AddLayer(ConvFromInput(3, width, 1, 64));
            net.AddLayer(new ReluLayer());
            //net.AddLayer(new DropoutLayer(0.3f));
            //net.AddLayer(ConvFromInput(3, width, 1, 64));
            //net.AddLayer(new ReluLayer());
            //net.AddLayer(new DropoutLayer(0.3f));
            //net.AddLayer(new ConvLayer(2, 2, 16) { Stride = 1 });
            net.AddLayer(ConvFromInput(3, width, 1, 64));
            net.AddLayer(new ReluLayer());
            //net.AddLayer(new DropoutLayer(0.3f));
            //net.AddLayer(new FullyConnLayer(width * height));
            //net.AddLayer(new ReluLayer());
            net.AddLayer(ConvFromInput(3, width, 1, 1));

            //net.AddLayer(new ReluLayer());
            net.AddLayer(new RegressionLayer());

            return(net);
        }
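The ConvFromInput helper is not part of this snippet. A minimal sketch of what it might look like, assuming its parameters are (filterSize, inputSize, stride, filterCount) and that it applies the "same"-padding formula from the comment above:

        // Hypothetical helper (assumption): builds a ConvLayer whose zero-padding keeps
        // the output the same spatial size as the input: pad = 0.5*((in - 1)*stride + width - in)
        private static ConvLayer ConvFromInput(int filterSize, int inputSize, int stride, int filterCount)
        {
            var pad = (int)(0.5 * ((inputSize - 1) * stride + filterSize - inputSize));

            return new ConvLayer(filterSize, filterSize, filterCount) { Stride = stride, Pad = pad };
        }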
Code Example #6
        public void JsonNetSerializerTest()
        {
            var net = new Net();

            net.AddLayer(new InputLayer(5, 5, 3));
            var conv = new ConvLayer(2, 2, 16);

            net.AddLayer(conv);
            var fullycon = new FullyConnLayer(3);

            net.AddLayer(fullycon);
            net.AddLayer(new SoftmaxLayer(3));

            // Serialize to json
            var json = net.ToJSON();

            // Deserialize from json
            Net deserialized = SerializationExtensions.FromJSON(json);

            // Make sure deserialized is identical to serialized
            Assert.IsNotNull(deserialized.Layers);
            Assert.AreEqual(net.Layers.Count, deserialized.Layers.Count);
            Assert.IsTrue(deserialized.Layers[0] is InputLayer);

            var deserializedConv = deserialized.Layers[1] as ConvLayer;

            Assert.NotNull(deserializedConv);
            Assert.NotNull(deserializedConv.Filters);
            Assert.AreEqual(16, deserializedConv.Filters.Count);
            for (int i = 0; i < deserializedConv.Filters.Count; i++)
            {
                for (int k = 0; k < deserializedConv.Filters[i].Length; k++)
                {
                    Assert.AreEqual(conv.Filters[i].Get(k), deserializedConv.Filters[i].Get(k));
                    Assert.AreEqual(conv.Filters[i].GetGradient(k), deserializedConv.Filters[i].GetGradient(k));
                }
            }

            var deserializedFullyCon = deserialized.Layers[2] as FullyConnLayer;

            Assert.NotNull(deserializedFullyCon);
            Assert.NotNull(deserializedFullyCon.Filters);
            Assert.AreEqual(3, deserializedFullyCon.Filters.Count);
            for (int i = 0; i < deserializedFullyCon.Filters.Count; i++)
            {
                for (int k = 0; k < deserializedFullyCon.Filters[i].Length; k++)
                {
                    Assert.AreEqual(fullycon.Filters[i].Get(k), deserializedFullyCon.Filters[i].Get(k));
                    Assert.AreEqual(fullycon.Filters[i].GetGradient(k), deserializedFullyCon.Filters[i].GetGradient(k));
                }
            }

            Assert.IsTrue(deserialized.Layers[3] is SoftmaxLayer);
            Assert.AreEqual(3, ((SoftmaxLayer)deserialized.Layers[3]).ClassCount);
        }
Code Example #7
        public void NetSerialization()
        {
            var net = new Net <double>();

            net.AddLayer(new InputLayer(28, 28, 1));
            net.AddLayer(new ConvLayer(5, 5, 8)
            {
                Stride = 1, Pad = 2, BiasPref = 0.1
            });
            net.AddLayer(new ReluLayer());
            net.AddLayer(new PoolLayer(2, 2)
            {
                Stride = 2
            });
            net.AddLayer(new SigmoidLayer());
            net.AddLayer(new TanhLayer());
            net.AddLayer(new FullyConnLayer(10)
            {
                BiasPref = 0.2
            });
            net.AddLayer(new SoftmaxLayer(10));

            var json         = net.ToJson();
            var deserialized = SerializationExtensions.FromJson <double>(json);
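            // No assertion here: the test passes as long as serialization and
            // deserialization complete without throwing.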
        }
Code Example #8
        public void Init()
        {
            // specifies a 2-layer neural network with one hidden layer
            Net        = new Net <double>();
            NetTrainer = new SgdTrainer(Net)
            {
                LearningRate = 0.02, L2Decay = 0.005
            };

            // input layer declares the size of the input
            // ConvNetJS works on 3-Dimensional volumes (width, height, depth), but if you're not dealing with images
            // then the first two dimensions (width, height) will always be kept at size 1
            // 30 input nodes, one for each 10px of the 300px ground, used as obstacle features
            Net.AddLayer(new InputLayer(1, 1, TrainerConfig.InputNodesCount));

            // declare the hidden layer neurons
            Net.AddLayer(new FullyConnLayer(TrainerConfig.HiddenLayerNodesCount));

            // declare a ReLU (rectified linear unit non-linearity)
            Net.AddLayer(new ReluLayer());

            // declare a fully connected layer that will be used by the softmax layer
            Net.AddLayer(new FullyConnLayer(2));

            // declare the linear classifier on top of the previous hidden layer
            Net.AddLayer(new SoftmaxLayer(2));

            var batch = 50;

            for (var j = 0; j < batch; j++)
            {
                Train(GenerateTrainData(), true);
            }

            //var x =
            //    new Volume(new[] {0.0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0}, new Shape(30));

            //var prob = Forward(x);

            //// prob is a Volume. Volumes have a property Weights that stores the raw data, and WeightGradients that stores gradients
            //Console.WriteLine(prob.Get(0) >= 0.5 ? "Jump" : "Stay"); // prints e.g. 0.50101

            //NetTrainer.Train(x, new Volume(new[] { 0.0 }, new Shape(1, 1, 1, 1))); // train the network, specifying that x is class zero

            //var prob2 = Net.Forward(x);
            //Console.WriteLine("probability that x is class 0: " + prob2.Get(0));
            // now prints 0.50374, slightly higher than the previous 0.50101: the network's
            // weights have been adjusted by the Trainer to give a higher probability to
            // the class we trained the network with (zero)
        }
Code Example #9
    // Use this for initialization
    void Start()
    {
        exp  = new Experience[experienceSize];
        expi = 0;
        expn = 0;
        t    = 0;
        r0   = -99f;

        // specifies a neural network with two hidden layers
        net = new Net();

        // input layer declares size of input. here: 2-D data
        // ConvNetSharp works on 3-Dimensional volumes (width, height, depth), but if you're not dealing with images
        // then the first two dimensions (width, height) will always be kept at size 1
        net.AddLayer(new InputLayer(1, 1, numStates));

        // declare the hidden neurons, followed by a ReLU (rectified linear unit non-linearity)
        net.AddLayer(new FullyConnLayer(hiddenNeurons, Activation.Relu));

        net.AddLayer(new FullyConnLayer(hiddenNeurons, Activation.Relu));

        // declare the regression layer on top of the previous hidden layers
        net.AddLayer(new RegressionLayer(numActions));

        double[] weights = { 0.3, -0.5, 0.1, 0.9, 0.6 };



        // forward a random data point through the network
        var x = new Volume(weights);

        var prob = net.Forward(x);

        // prob is a Volume. Volumes have a property Weights that stores the raw data, and WeightGradients that stores gradients
        Debug.Log("probability that x is class 0: " + prob.Weights[0]); // prints e.g. 0.50101

        trainer = new SgdTrainer(net)
        {
            LearningRate = 0.01, L2Decay = 0.001, Momentum = 0.0, BatchSize = 1
        };

        //trainer.Train(x, 0); // train the network, specifying that x is class zero

        Volume prob2 = net.Forward(x);

        Debug.Log("probability that x is class 0: " + prob2.Weights[0]);
        // now prints 0.50374, slightly higher than the previous 0.50101: the network's
        // weights have been adjusted by the Trainer to give a higher probability to
        // the class we trained the network with (zero)
    }
Code Example #10
File: Program.cs Project: zaharPonimash/ConvNetSharp
        /// <summary>
        ///     This sample shows how to serialize and deserialize a ConvNetSharp.Core network
        ///     1) Network creation
        ///     2) Dummy Training (only use a single data point)
        ///     3) Serialization
        ///     4) Deserialization
        /// </summary>
        private static void Main()
        {
            // 1) Network creation
            var net = new Net <double>();

            net.AddLayer(new InputLayer(1, 1, 2));
            net.AddLayer(new FullyConnLayer(20));
            net.AddLayer(new ReluLayer());
            net.AddLayer(new FullyConnLayer(10));
            net.AddLayer(new SoftmaxLayer(10));

            // 2) Dummy Training (only use a single data point)
            var x = BuilderInstance.Volume.From(new[] { 0.3, -0.5 }, new Shape(2));
            var y = BuilderInstance.Volume.From(new[] { 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 }, new Shape(10));

            var count   = 0;
            var trainer = new SgdTrainer(net)
            {
                LearningRate = 0.01
            };

            do
            {
                trainer.Train(x, y); // train the network, specifying that x is class zero
                Console.WriteLine($"Loss: {trainer.Loss}");
                count++;
            } while (trainer.Loss > 1e-2);

            Console.WriteLine($"{count}");

            // Forward pass with original network
            var prob1 = net.Forward(x);

            Console.WriteLine("probability that x is class 0: " + prob1.Get(0));

            // 3) Serialization
            var json = net.ToJson();

            // 4) Deserialization
            var deserialized = SerializationExtensions.FromJson <double>(json);

            // Forward pass with deserialized network
            var prob2 = deserialized.Forward(x);

            Console.WriteLine("probability that x is class 0: " + prob2.Get(0)); // This should give exactly the same result as previous network evaluation

            Console.ReadLine();
        }
Code Example #11
File: Program.cs Project: NSqda/XCoinTrader
 public void BuildNetwork(Net <double> net)
 {
     net.AddLayer(new InputLayer(1, 1, screenDepth));
     net.AddLayer(new FullyConnLayer(128));
     net.AddLayer(new ReluLayer());
     net.AddLayer(new FullyConnLayer(128));
     net.AddLayer(new ReluLayer());
     net.AddLayer(new FullyConnLayer(16));
     net.AddLayer(new ReluLayer());
     net.AddLayer(new FullyConnLayer(numAction));
     net.AddLayer(new SoftmaxLayer(3));
 }
Code Example #12
        public static Net<T> Build<T>(this LayerBase<T> layer) where T : struct, IEquatable<T>, IFormattable
        {
            var net = new Net<T>();
            net.AddLayer(layer);

            return net;
        }
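A possible usage of this extension, assuming the generic core layers:

        // Hypothetical usage: wrap a single layer into a fresh Net<T>.
        var net = new InputLayer<double>(28, 28, 1).Build();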
Code Example #13
        public void BiasPrefUpdateWhenAddingReluLayer()
        {
            var net = new Net();

            net.AddLayer(new InputLayer(10, 10, 3));
            var dotProduct1 = new FullyConnLayer(5);

            net.AddLayer(dotProduct1);
            net.AddLayer(new ReluLayer());
            var dotProduct2 = new ConvLayer(5, 5, 3);

            net.AddLayer(dotProduct2);
            net.AddLayer(new ReluLayer());

            Assert.AreEqual(0.1, dotProduct1.BiasPref);
            Assert.AreEqual(0.1, dotProduct2.BiasPref);
        }
Code Example #14
        public static Network CreateNew()
        {
            var net = new Net <double>();

            net.AddLayer(new InputLayer(256, 256, 3));

            // ToDo: Pad ConvLayers?

            net.AddLayer(new ConvLayer(16, 16, 192)
            {
                Stride = 12
            });
            net.AddLayer(new ReluLayer());
            net.AddLayer(new PoolLayer(6, 6)
            {
                Stride = 6
            });

            net.AddLayer(new FullyConnLayer(512));

            net.AddLayer(new FullyConnLayer(1));

            net.AddLayer(new RegressionLayer());

            return(new Network(net));
        }
Code Example #15
        private static void Main()
        {
            // specifies a 2-layer neural network with one hidden layer of 20 neurons
            var net = new Net <double>();

            // input layer declares size of input. here: 2-D data
            // ConvNetJS works on 3-Dimensional volumes (width, height, depth), but if you're not dealing with images
            // then the first two dimensions (width, height) will always be kept at size 1
            net.AddLayer(new InputLayer(1, 1, 2));

            // declare 20 neurons
            net.AddLayer(new FullyConnLayer(20));

            // declare a ReLU (rectified linear unit non-linearity)
            net.AddLayer(new ReluLayer());

            // declare a fully connected layer that will be used by the softmax layer
            net.AddLayer(new FullyConnLayer(10));

            // declare the linear classifier on top of the previous hidden layer
            net.AddLayer(new SoftmaxLayer(10));

            // forward a random data point through the network
            var x = BuilderInstance.Volume.From(new[] { 0.3, -0.5 }, new Shape(2));

            var prob = net.Forward(x);

            // prob is a Volume. Volumes have a property Weights that stores the raw data, and WeightGradients that stores gradients
            Console.WriteLine("probability that x is class 0: " + prob.Get(0)); // prints e.g. 0.50101

            var trainer = new SgdTrainer(net)
            {
                LearningRate = 0.01, L2Decay = 0.001
            };

            trainer.Train(x, BuilderInstance.Volume.From(new[] { 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 }, new Shape(1, 1, 10, 1))); // train the network, specifying that x is class zero

            var prob2 = net.Forward(x);

            Console.WriteLine("probability that x is class 0: " + prob2.Get(0));
            // now prints 0.50374, slightly higher than the previous 0.50101: the network's
            // weights have been adjusted by the Trainer to give a higher probability to
            // the class we trained the network with (zero)
        }
Code Example #16
        public static void Classify2DDemo()
        {
            var net = new Net <double>();

            net.AddLayer(new InputLayer <double>());
            net.AddLayer(new FullyConnLayer <double>(6));
            net.AddLayer(new TanhLayer <double>());
            net.AddLayer(new FullyConnLayer <double>(2));
            net.AddLayer(new TanhLayer <double>());
            net.AddLayer(new FullyConnLayer <double>(2));
            net.AddLayer(new SoftmaxLayer <double>());

            // Data
            var data   = new List <double[]>();
            var labels = new List <int>();

            data.Add(new[] { -0.4326, 1.1909 });
            labels.Add(1);
            data.Add(new[] { 3.0, 4.0 });
            labels.Add(1);
            data.Add(new[] { 0.1253, -0.0376 });
            labels.Add(1);
            data.Add(new[] { 0.2877, 0.3273 });
            labels.Add(1);
            data.Add(new[] { -1.1465, 0.1746 });
            labels.Add(1);
            data.Add(new[] { 1.8133, 1.0139 });
            labels.Add(0);
            data.Add(new[] { 2.7258, 1.0668 });
            labels.Add(0);
            data.Add(new[] { 1.4117, 0.5593 });
            labels.Add(0);
            data.Add(new[] { 4.1832, 0.3044 });
            labels.Add(0);
            data.Add(new[] { 1.8636, 0.1677 });
            labels.Add(0);
            data.Add(new[] { 0.5, 3.2 });
            labels.Add(1);
            data.Add(new[] { 0.8, 3.2 });
            labels.Add(1);
            data.Add(new[] { 1.0, -2.2 });
            labels.Add(1);
            var n = labels.Count;

            var trainer = new SgdTrainer <double>(net, 0.01);

            do
            {
                Classify2DUpdate(n, data, trainer, labels);
            } while (!Console.KeyAvailable);

            // Display graph
            var vm  = new ViewModel <double>(net.Cost);
            var app = new Application();

            app.Run(new GraphControl {
                DataContext = vm
            });
        }
Code Example #17
        public Brain(int numInputs, int numActions)
        {
            CreatedDate  = DateTime.Now;
            TrainingTime = new TimeSpan();

            NumInputs  = numInputs;
            NumActions = numActions;

            // network
            var layer1N = (numInputs + numActions) / 2;

            Net = new Net();
            Net.AddLayer(new InputLayer(1, 1, numInputs));
            Net.AddLayer(new FullyConnLayer(layer1N));
            Net.AddLayer(new ReluLayer());
            Net.AddLayer(new FullyConnLayer(numActions));
            Net.AddLayer(new RegressionLayer());

            World = GridWorld.StandardState();
        }
Code Example #18
        /// <summary>
        /// Creates a layer
        /// </summary>
        /// <param name="type"> The type. </param>
        /// <param name="val1"> The first value.</param>
        /// <param name="val2"> The second value.</param>
        /// <param name="val3"> The third value. </param>
        internal void CreateLayer(LayerType type, double val1, double val2, double val3)
        {
            switch (type)
            {
            case LayerType.InputLayer:
                _net?.AddLayer(new InputLayer((int)val1, (int)val2, (int)val3));
                break;

            case LayerType.FullyConnLayer:
                _net?.AddLayer(new FullyConnLayer((int)val1));
                break;

            case LayerType.ReluLayer:
                _net?.AddLayer(new ReluLayer());
                break;

            case LayerType.SoftmaxLayer:
                _net?.AddLayer(new SoftmaxLayer((int)val1));
                break;
            }
        }
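A possible call sequence, assuming _net has already been created; value parameters that a given layer type does not use are passed as 0:

            // Hypothetical usage of CreateLayer building a small classifier:
            CreateLayer(LayerType.InputLayer, 1, 1, 2);
            CreateLayer(LayerType.FullyConnLayer, 20, 0, 0);
            CreateLayer(LayerType.ReluLayer, 0, 0, 0);
            CreateLayer(LayerType.FullyConnLayer, 10, 0, 0);
            CreateLayer(LayerType.SoftmaxLayer, 10, 0, 0);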
Code Example #19
 public CNN(int[] featureNum, int actionNum, int batchSize = 31)
 {
     //get channel
     _channel   = featureNum[0];
     _width     = featureNum[1];
     _height    = featureNum[2];
     _actionNum = actionNum;
     _batchSize = batchSize;
     //create cnn neural network
     _network = new Net <double>();
     _network.AddLayer(new InputLayer <double>(_width, _height, _channel));
     _network.AddLayer(new ConvLayer <double>(1, 1, 2)
     {
         Stride = 1, Pad = 2, BiasPref = 0.1f
     });
     _network.AddLayer(new ReluLayer <double>());
     _network.AddLayer(new PoolLayer <double>(2, 2)
     {
         Stride = 2
     });
     _network.AddLayer(new ConvLayer <double>(5, 5, 16)
     {
         Stride = 1, Pad = 2, BiasPref = 0.1f
     });
     _network.AddLayer(new ReluLayer <double>());
     _network.AddLayer(new PoolLayer <double>(3, 3)
     {
         Stride = 3
     });
     _network.AddLayer(new FullyConnLayer <double>(_actionNum));
     _network.AddLayer(new SoftmaxLayer <double>(_actionNum));
     //create trainer
     _trainer = new SgdTrainer <double>(_network)
     {
         LearningRate = 0.001,
         BatchSize    = batchSize,
         L2Decay      = 0.001,
         Momentum     = 0.9
     };
 }
Code Example #20
        private void CreateNetworkForTactile()
        {
            // Create the network
            net = new Net();

            net.AddLayer(new InputLayer(inputWidth, inputHeight, inputDepth));

            // Receptive field width and height, number of filters
            net.AddLayer(new ConvLayer(3, 3, 8)
            {
                // Convolution stride
                Stride = 1,
                // Zero padding at the edges
                Pad = 1
            });
            net.AddLayer(new ReluLayer());

            // Pooling window width and height
            net.AddLayer(new PoolLayer(2, 2)
            {
                // Stride
                Stride = 2
            });

            net.AddLayer(new ConvLayer(3, 3, 16)
            {
                Stride = 1,
                Pad    = 1
            });
            net.AddLayer(new ReluLayer());

            net.AddLayer(new PoolLayer(3, 3)
            {
                Stride = 3
            });

            net.AddLayer(new FullyConnLayer(names.Length));
            net.AddLayer(new SoftmaxLayer(names.Length));
        }
Code Example #21
        public void NetSerializationData()
        {
            var net = new Net <double>();

            net.AddLayer(new InputLayer(28, 28, 1));
            net.AddLayer(new ConvLayer(5, 5, 8)
            {
                Stride = 1, Pad = 2
            });
            net.AddLayer(new ReluLayer());
            net.AddLayer(new PoolLayer(2, 2)
            {
                Stride = 2
            });
            net.AddLayer(new ConvLayer(5, 5, 16)
            {
                Stride = 1, Pad = 2
            });
            net.AddLayer(new ReluLayer());
            net.AddLayer(new PoolLayer(3, 3)
            {
                Stride = 3
            });
            net.AddLayer(new FullyConnLayer(10));
            net.AddLayer(new SoftmaxLayer(10));

            var data = net.GetData();

            var layers = data["Layers"] as List <Dictionary <string, object> >;

            Assert.IsNotNull(layers);
            Assert.AreEqual(net.Layers.Count, layers.Count);

            var deserialized = Net <double> .FromData(data);

            Assert.AreEqual(net.Layers.Count, deserialized.Layers.Count);
        }
Code Example #22
File: Program.cs Project: radioman/ConvNetSharp
        private static void Regression1DDemo()
        {
            var net = new Net();

            net.AddLayer(new InputLayer(1, 1, 1));
            net.AddLayer(new FullyConnLayer(20));
            net.AddLayer(new ReluLayer());
            net.AddLayer(new FullyConnLayer(20));
            net.AddLayer(new SigmoidLayer());
            net.AddLayer(new FullyConnLayer(1));
            net.AddLayer(new RegressionLayer());

            var trainer = new SgdTrainer(net)
            {
                LearningRate = 0.01, Momentum = 0.0, BatchSize = 1, L2Decay = 0.001
            };

            // Function we want to learn
            double[] x = { 0.0, 0.5, 1.0 };
            double[] y = { 0.0, 0.1, 0.2 };
            var      n = x.Length;

            // Training
            do
            {
                RegressionUpdate(n, x, trainer, y);
            } while (!Console.KeyAvailable);

            // Testing
            var netx = new Volume(1, 1, 1);

            for (var ix = 0; ix < n; ix++)
            {
                netx.Set(0, 0, 0, x[ix]);
                var result = net.Forward(netx);
            }
        }
Code Example #23
        public void JsonAnotherNetSerilizerTest()
        {
            var numInputs  = 64;
            var numActions = 5;
            var net        = new Net();

            net.AddLayer(new InputLayer(1, 1, numInputs));
            net.AddLayer(new FullyConnLayer((int)(numInputs * 2.5)));
            net.AddLayer(new ReluLayer());
            net.AddLayer(new FullyConnLayer((int)(numInputs * 1.5)));
            net.AddLayer(new ReluLayer());
            net.AddLayer(new FullyConnLayer(numActions));
            net.AddLayer(new RegressionLayer());

            // Serialize to json
            var json = net.ToJSON();

            // Deserialize from json
            var deserialized = SerializationExtensions.FromJSON(json);

            var input   = new Volume(1, 1, numInputs);
            var output1 = net.Forward(input);
            var output2 = deserialized.Forward(input);
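            // output1 and output2 are not compared here; the test only checks that the
            // deserialized network accepts the same input shape and runs a forward pass.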
        }
Code Example #24
        private static void Classify2DDemo()
        {
            var net = new Net <double>();

            net.AddLayer(new InputLayer(1, 1, 2));
            net.AddLayer(new FullyConnLayer(6));
            net.AddLayer(new TanhLayer());
            net.AddLayer(new FullyConnLayer(2));
            net.AddLayer(new TanhLayer());
            net.AddLayer(new FullyConnLayer(2));
            net.AddLayer(new SoftmaxLayer(2));

            // Data
            var data   = new List <double[]>();
            var labels = new List <int>();

            data.Add(new[] { -0.4326, 1.1909 });
            labels.Add(1);
            data.Add(new[] { 3.0, 4.0 });
            labels.Add(1);
            data.Add(new[] { 0.1253, -0.0376 });
            labels.Add(1);
            data.Add(new[] { 0.2877, 0.3273 });
            labels.Add(1);
            data.Add(new[] { -1.1465, 0.1746 });
            labels.Add(1);
            data.Add(new[] { 1.8133, 1.0139 });
            labels.Add(0);
            data.Add(new[] { 2.7258, 1.0668 });
            labels.Add(0);
            data.Add(new[] { 1.4117, 0.5593 });
            labels.Add(0);
            data.Add(new[] { 4.1832, 0.3044 });
            labels.Add(0);
            data.Add(new[] { 1.8636, 0.1677 });
            labels.Add(0);
            data.Add(new[] { 0.5, 3.2 });
            labels.Add(1);
            data.Add(new[] { 0.8, 3.2 });
            labels.Add(1);
            data.Add(new[] { 1.0, -2.2 });
            labels.Add(1);
            var n = labels.Count;

            var trainer = new SgdTrainer <double>(net)
            {
                LearningRate = 0.01, L2Decay = 0.001, BatchSize = n
            };

            // Training
            do
            {
                Classify2DUpdate(n, data, trainer, labels);
            } while (!Console.KeyAvailable);

            // Testing
            var netx = new Volume(new double[2 * n], new Shape(1, 1, 2, n));

            for (var ix = 0; ix < n; ix++)
            {
                netx.Set(0, 0, 0, ix, data[ix][0]);
                netx.Set(0, 0, 1, ix, data[ix][1]);
            }

            var result   = net.Forward(netx);
            var c        = net.GetPrediction();
            var accurate = c[0] == labels[0];
        }
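Classify2DUpdate is not shown in this snippet. A minimal sketch of what it might do, inferred from the call site and the batch layout used in the testing code above (an assumption, not the original helper):

        // Hypothetical sketch: pack all points into one batch, one-hot encode the labels,
        // and run a single SGD step.
        private static void Classify2DUpdate(int n, List<double[]> data, SgdTrainer<double> trainer, List<int> labels)
        {
            var x = new Volume(new double[2 * n], new Shape(1, 1, 2, n));
            var y = new Volume(new double[2 * n], new Shape(1, 1, 2, n));

            for (var i = 0; i < n; i++)
            {
                x.Set(0, 0, 0, i, data[i][0]);
                x.Set(0, 0, 1, i, data[i][1]);
                y.Set(0, 0, labels[i], i, 1.0);
            }

            trainer.Train(x, y);
        }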
Code Example #25
        private static void Main(string[] args)
        {
            // Load data

            var min_count       = 10;
            var polarity_cutoff = 0.1;

            var labels  = File.ReadAllLines("../../../../Data/labels.txt");
            var reviews = File.ReadAllLines("../../../../Data/reviews.txt");

            // Count words

            var vocab           = new Dictionary <string, int>();
            var positive_counts = new Dictionary <string, int>();
            var negative_counts = new Dictionary <string, int>();
            var pos_neg_ratios  = new Dictionary <string, double>();

            foreach (var pair in reviews.Zip(labels, (review, label) => new { review, label }))
            {
                var review = pair.review;
                var label  = pair.label;

                foreach (var word in review.ToLower().Split(' '))
                {
                    vocab.TryGetValue(word, out var count);
                    vocab[word] = count + 1;

                    var dico = label == "positive" ? positive_counts : negative_counts;
                    dico.TryGetValue(word, out count);
                    dico[word] = count + 1;

                    var otherDico = label == "positive" ? negative_counts : positive_counts;
                    otherDico.TryGetValue(word, out count);
                    otherDico[word] = count; // This is used to set count to 0 words that appear only on one side
                }
            }

            // Compute ratios

            foreach (var word in vocab.Keys)
            {
                if (vocab[word] > 50)
                {
                    var ratio = positive_counts[word] / (negative_counts[word] + 1.0);
                    if (ratio > 1.0)
                    {
                        pos_neg_ratios[word] = Math.Log(ratio);
                    }
                    else
                    {
                        pos_neg_ratios[word] = -Math.Log(1.0 / (ratio + 0.01));
                    }
                }
                else
                {
                    pos_neg_ratios[word] = 0.0;
                }
            }

            var review_vocab = vocab.Where(o => o.Value > min_count && Math.Abs(pos_neg_ratios[o.Key]) > polarity_cutoff).Select(o => o.Key).ToList();

            // Create word to index map

            var wordToIndex = review_vocab.Select((word, index) => new { word, index }).ToDictionary(o => o.word, o => o.index);

            // Build network

            var network = new Net <double>();

            network.AddLayer(new InputLayer(1, 1, review_vocab.Count));
            network.AddLayer(new FullyConnLayer(10));
            network.AddLayer(new FullyConnLayer(1));
            network.AddLayer(new RegressionLayer());

            // Training

            var trainer = new SgdTrainer(network)
            {
                LearningRate = 0.005
            };

            var input  = BuilderInstance.Volume.SameAs(new Shape(1, 1, review_vocab.Count));
            var output = BuilderInstance.Volume.SameAs(new Shape(1, 1, 1));

            var i       = 0;
            var correct = 0;

            for (var epoch = 0; epoch < 3; epoch++)
            {
                Console.WriteLine($"Epoch #{epoch}");

                foreach (var pair in reviews.Zip(labels, (review, label) => new { review, label }))
                {
                    var review = pair.review;
                    var label  = pair.label;
                    FillVolume(input, review, wordToIndex);

                    output.Set(0, 0, 0, pair.label == "positive" ? 1.0 : 0.0);

                    var test = network.Forward(input);
                    if (test > 0.5 && label == "positive" || test < 0.5 && label == "negative")
                    {
                        correct++;
                    }

                    trainer.Train(input, output);

                    if (i > 0 && i % 100 == 0)
                    {
                        Console.WriteLine($"Accuracy: {Math.Round(correct / (double)i * 100.0, 2)}%");
                        Console.WriteLine($"{i}/{reviews.Length}");
                    }

                    i++;
                    if (Console.KeyAvailable)
                    {
                        break;
                    }
                }
            }

            // Save Network

            File.WriteAllText(@"../../../../Model/sentiment.json", network.ToJson());
        }
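FillVolume is not included in this snippet. A minimal sketch of what it might do, inferred from the call site (an assumption; the real helper may differ):

        // Hypothetical sketch: reset the bag-of-words input, then set 1.0 at the index of
        // every vocabulary word that occurs in the review.
        private static void FillVolume(Volume<double> input, string review, Dictionary<string, int> wordToIndex)
        {
            for (var j = 0; j < wordToIndex.Count; j++)
            {
                input.Set(0, 0, j, 0.0);
            }

            foreach (var word in review.ToLower().Split(' '))
            {
                if (wordToIndex.TryGetValue(word, out var index))
                {
                    input.Set(0, 0, index, 1.0);
                }
            }
        }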
Code Example #26
        public void CompareCoreVsFlow()
        {
            var inputWidth  = 28;
            var inputHeigth = 28;
            var inputDepth  = 3;
            var batchSize   = 20;

            #region Flow network

            var netFlow = new Net <T>();
            netFlow.AddLayer(new InputLayer <T>());
            var convLayerFlow1 = new ConvLayer <T>(5, 5, 8)
            {
                BiasPref = (T)Convert.ChangeType(0.1, typeof(T)), Stride = 1, Pad = 2
            };
            netFlow.AddLayer(convLayerFlow1);
            netFlow.AddLayer(new ReluLayer <T>());
            netFlow.AddLayer(new PoolLayer <T>(2, 2)
            {
                Stride = 2
            });
            var fullyConnLayerFlow = new FullyConnLayer <T>(10);
            netFlow.AddLayer(fullyConnLayerFlow);
            netFlow.AddLayer(new SoftmaxLayer <T>());

            var trainerFlow = new SgdTrainer <T>(netFlow, (T)Convert.ChangeType(0.01f, typeof(T)))
            {
                BatchSize = batchSize
            };

            #endregion

            #region Core network

            var netCore = new Core.Net <T>();
            netCore.AddLayer(new Core.Layers.InputLayer <T>(inputWidth, inputHeigth, inputDepth));
            var convLayerCore1 = new Core.Layers.ConvLayer <T>(5, 5, 8)
            {
                BiasPref = (T)Convert.ChangeType(0.1, typeof(T)), Stride = 1, Pad = 2
            };
            netCore.AddLayer(convLayerCore1);
            netCore.AddLayer(new Core.Layers.ReluLayer <T>());
            netCore.AddLayer(new Core.Layers.PoolLayer <T>(2, 2)
            {
                Stride = 2
            });
            var fullyConnLayerCore = new Core.Layers.FullyConnLayer <T>(10);
            netCore.AddLayer(fullyConnLayerCore);
            netCore.AddLayer(new Core.Layers.SoftmaxLayer <T>(10));

            var trainerCore = new Core.Training.SgdTrainer <T>(netCore)
            {
                LearningRate = (T)Convert.ChangeType(0.01f, typeof(T)),
                BatchSize    = batchSize
            };

            #endregion

            // Same weights
            var convfilterCore1 = netFlow.Session.GetVariableByName(netFlow.Op, (convLayerFlow1.Filter as IPersistable <T>).Name);
            convfilterCore1.Result = BuilderInstance <T> .Volume.SameAs(convLayerCore1.Filters.ToArray(), convLayerCore1.Filters.Shape);

            var fullyfilterCore = netFlow.Session.GetVariableByName(netFlow.Op, (fullyConnLayerFlow.Filter as IPersistable <T>).Name);
            fullyfilterCore.Result = BuilderInstance <T> .Volume.SameAs(fullyConnLayerCore.Filters.ToArray(), fullyConnLayerCore.Filters.Shape);

            // Create input
            var xStorage = new double[inputWidth * inputHeigth * inputDepth * batchSize].Populate(1.0);
            var x        = NewVolume(xStorage, Volume.Shape.From(inputWidth, inputHeigth, inputDepth, batchSize));

            // Create output
            var yStorage = new double[10 * batchSize];
            var y        = NewVolume(yStorage, Volume.Shape.From(1, 1, 10, batchSize));
            for (var i = 0; i < batchSize; i++)
            {
                y.Set(0, 0, i % 10, i, Ops <T> .One);
            }

            for (var k = 0; k < 10; k++)
            {
                xStorage = new double[inputWidth * inputHeigth * inputDepth * batchSize].Populate(1.0 + k);
                x        = NewVolume(xStorage, Volume.Shape.From(inputWidth, inputHeigth, inputDepth, batchSize));

                var flowResult = netFlow.Forward(x);
                var coreResult = netCore.Forward(x);

                var sum1 = BuilderInstance <T> .Volume.SameAs(new Shape(1));

                flowResult.DoSum(sum1);
                var sum2 = BuilderInstance <T> .Volume.SameAs(new Shape(1));

                coreResult.DoSum(sum2);
                var diff = Ops <T> .Subtract(sum1.Get(0), sum2.Get(0));

                Console.WriteLine(diff);

                AssertNumber.AreSequenceEqual(flowResult.ToArray(), coreResult.ToArray(), 1e-6);

                trainerCore.Train(x, y);
                trainerFlow.Train(x, y);
            }
        }
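NewVolume is a test-fixture helper that does not appear in this snippet. A minimal sketch of what it might do, assuming it converts a double[] payload to the fixture's numeric type T (an assumption):

        // Hypothetical sketch: convert the double payload to T and wrap it in a volume.
        protected Volume<T> NewVolume(double[] values, Shape shape)
        {
            var converted = values.Select(v => (T)Convert.ChangeType(v, typeof(T))).ToArray();

            return BuilderInstance<T>.Volume.SameAs(converted, shape);
        }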
Code Example #27
        public void Start(List <User> userList, int neededUserId, int imageSize, Action <string> infoOutputFunc)
        {
            // Load data
            _userList       = userList;
            _neededUserId   = neededUserId;
            _imageSize      = imageSize;
            _stepCount      = 1;
            _infoOutputFunc = infoOutputFunc;

            // Create network
            _net = new Net();
            _net.AddLayer(new InputLayer(_imageSize, _imageSize, 1));
            _net.AddLayer(new ConvLayer(5, 5, 16)
            {
                Stride = 1, Pad = 1, Activation = Activation.Relu
            });
            _net.AddLayer(new PoolLayer(2, 2)
            {
                Stride = 2
            });
            _net.AddLayer(new ConvLayer(5, 5, 8)
            {
                Stride = 1, Pad = 1, Activation = Activation.Relu
            });
            _net.AddLayer(new PoolLayer(2, 2)
            {
                Stride = 2
            });
            _net.AddLayer(new SoftmaxLayer(2));

            _trainer = new Trainer(_net)
            {
                BatchSize      = 20,
                L2Decay        = 0.001,
                TrainingMethod = Trainer.Method.Adagrad
            };

            Stopwatch sw = Stopwatch.StartNew();

            do
            {
                var sample = GenerateTrainingInstance();
                if (!Step(sample))
                {
                    break;
                }
            } while (true);
            sw.Stop();

            if (_infoOutputFunc != null)
            {
                _infoOutputFunc(string.Format("\nTraining time: {0} ms", sw.ElapsedMilliseconds));
            }

            Console.WriteLine(sw.ElapsedMilliseconds / 1000.0);

            // Output checking
            if (_infoOutputFunc != null)
            {
                _infoOutputFunc("\n\nUser Id \t\t Wrong user \t\t Correct user");
            }

            foreach (User user in _userList.OrderByDescending(i => i.UserId))
            {
                Random random    = new Random();
                var    signature = user.SignatureList[random.Next(user.SignatureList.Count)];

                var x      = GetVolume(signature);
                var result = _net.Forward(x);

                if (_infoOutputFunc != null)
                {
                    _infoOutputFunc(string.Format("{0} \t\t\t {1} \t\t {2}", user.UserId, result.Weights[0], result.Weights[1]));
                }
            }
        }
Code Example #28
        public void Demo()
        {
            // Load data
            _userList = ReadingManager.ReadData(@"C:\Users\RustamSalakhutdinov\Documents\visual studio 2015\Projects\signatureChecker\data_new");

            // Create network
            _net = new Net();
            _net.AddLayer(new InputLayer(imageSize, imageSize, 1));
            _net.AddLayer(new ConvLayer(5, 5, 16)
            {
                Stride = 1, Pad = 1, Activation = Activation.Relu
            });
            _net.AddLayer(new PoolLayer(2, 2)
            {
                Stride = 2
            });
            _net.AddLayer(new ConvLayer(5, 5, 8)
            {
                Stride = 1, Pad = 1, Activation = Activation.Relu
            });
            _net.AddLayer(new PoolLayer(2, 2)
            {
                Stride = 2
            });
            _net.AddLayer(new SoftmaxLayer(2));

            _trainer = new Trainer(_net)
            {
                BatchSize      = 20,
                L2Decay        = 0.001,
                TrainingMethod = Trainer.Method.Adagrad
            };

            Stopwatch sw = Stopwatch.StartNew();

            do
            {
                var sample = GenerateTrainingInstance();
                if (!Step(sample))
                {
                    break;
                }
            } while (!Console.KeyAvailable);
            sw.Stop();
            Console.WriteLine(sw.ElapsedMilliseconds / 1000.0);

            foreach (User user in _userList)
            {
                Random random    = new Random();
                var    signature = user.SignatureList[random.Next(user.SignatureList.Count)];

                var x = new Volume(imageSize, imageSize, 1, 0.0);

                foreach (var point in signature.SignaturePointList)
                {
                    x.Weights[point.X * imageSize + point.Y] = 1;
                }

                x = x.Augment(imageSize);

                var result = _net.Forward(x);
                Console.WriteLine("UserId: {0}. Result: {1} | {2}", user.UserId, result.Weights[0], result.Weights[1]);
            }
        }
Code Example #29
        public ConvNetSharpNetwork()
        {
            network = new Net <double>();

            network.AddLayer(new InputLayer(50, 52, 1));
            network.AddLayer(new ConvLayer(3, 3, 8)
            {
                Stride = 1, Pad = 2
            });
            network.AddLayer(new ReluLayer());
            network.AddLayer(new PoolLayer(3, 3)
            {
                Stride = 2
            });
            network.AddLayer(new ConvLayer(3, 3, 16)
            {
                Stride = 1, Pad = 2
            });
            network.AddLayer(new ReluLayer());
            network.AddLayer(new PoolLayer(3, 3)
            {
                Stride = 2
            });
            network.AddLayer(new ConvLayer(3, 3, 32)
            {
                Stride = 1, Pad = 2
            });
            network.AddLayer(new FullyConnLayer(20));
            network.AddLayer(new FullyConnLayer(50));
            network.AddLayer(new FullyConnLayer(2));
            network.AddLayer(new SoftmaxLayer(2));

            trainer = GetTrainerForNetwork(network);
        }
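GetTrainerForNetwork is not shown here. A minimal sketch under the assumption that it returns a plain SGD trainer (the real method may use different hyper-parameters or another trainer type):

        // Hypothetical sketch: a basic SGD trainer for the network above.
        private static SgdTrainer<double> GetTrainerForNetwork(Net<double> net)
        {
            return new SgdTrainer<double>(net)
            {
                LearningRate = 0.01,
                BatchSize = 10,
                L2Decay = 0.001
            };
        }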
Code Example #30
File: Program.cs Project: konowrockis/lung-cancer
        private void CreateNetwork()
        {
            net = new Net <float>();

            net.AddLayer(new InputLayer(windowSize, windowSize, 1));
            net.AddLayer(new ConvLayer(5, 5, 3)
            {
                Stride = 1, Pad = 0
            });                                // 21 x 21
            net.AddLayer(new ReluLayer());     // 21 x 21
            net.AddLayer(new PoolLayer(2, 2)); // 10 x 10
            net.AddLayer(new ConvLayer(5, 5, 3)
            {
                Stride = 1, Pad = 0
            });                                                          // 6 x 6
            net.AddLayer(new ReluLayer());
            net.AddLayer(new PoolLayer(2, 2));
            net.AddLayer(new FullyConnLayer(16));
            net.AddLayer(new TanhLayer());
            net.AddLayer(new FullyConnLayer(2));
            net.AddLayer(new SoftmaxLayer(2));

            trainer = new SgdTrainer(net)
            {
                LearningRate = 0.01f, L2Decay = 0.001f
            };
        }