Example #1
0
        /// <summary>
        /// Builds a small test network (2 inputs, 1 output) over Data.TrainingSample
        /// and returns a fully configured backpropagation trainer for it.
        /// </summary>
        /// <returns>
        /// A <see cref="ML.DeepMethods.Algorithms.BackpropAlgorithm"/> ready to run,
        /// with per-300-epoch console diagnostics wired up.
        /// </returns>
        private ML.DeepMethods.Algorithms.BackpropAlgorithm createCNNAlg_NN_ForTest()
        {
            var cnn = new ConvNet(2, 1)
            {
                IsTraining = true
            };

            cnn.AddLayer(new DenseLayer(15, activation: Activation.Logistic(1)));
            cnn.AddLayer(new MaxPoolingLayer(1, 1));
            cnn.AddLayer(new DropoutLayer(0.1));
            cnn.AddLayer(new FlattenLayer(3, activation: Activation.Logistic(1)));
            cnn.AddLayer(new MaxPoolingLayer(1, 1));

            cnn._Build();
            cnn.RandomizeParameters(0); // fixed seed => reproducible test runs

            // Repackage each training vector as an array of 1x1 "images" —
            // the per-channel input shape the ConvNet expects.
            var sample = new ClassifiedSample<double[][,]>();

            foreach (var obj in Data.TrainingSample)
            {
                var data = obj.Key;

                // Single pass: allocate and fill each 1x1 channel together
                // (previously done in two separate loops over the same range).
                var key = new double[data.Length][,];
                for (int i = 0; i < data.Length; i++)
                {
                    key[i] = new double[1, 1];
                    key[i][0, 0] = data[i];
                }

                sample[key] = obj.Value;
            }

            var alg = new ML.DeepMethods.Algorithms.BackpropAlgorithm(sample, cnn);

            alg.EpochCount   = 6000;
            alg.LearningRate = 0.01D;
            alg.BatchSize    = 1;
            alg.LossFunction = Loss.Euclidean;

            int epoch = 0;

            // Print loss diagnostics once every 300 epochs.
            alg.EpochEndedEvent += (o, e) =>
            {
                if (epoch++ % 300 != 0)
                {
                    return;
                }
                Console.WriteLine("----------------Epoch #: {0}", epoch);
                Console.WriteLine("L:\t{0}", alg.LossValue);
                Console.WriteLine("DL:\t{0}", alg.LossDelta);
                Console.WriteLine("DW:\t{0}", alg.Step2);
            };

            return alg;
        }
Example #2
0
        /// <summary>
        /// Creates a 6-input / 1-output fully-connected test network and a
        /// backpropagation trainer configured for it (loss logged every 300 epochs).
        /// </summary>
        /// <returns>The configured <see cref="ML.DeepMethods.Algorithms.BackpropAlgorithm"/>.</returns>
        private ML.DeepMethods.Algorithms.BackpropAlgorithm createCNNAlg_NN_ForTest()
        {
            var net = new ConvNet(6, 1) { IsTraining = true };

            // Three hidden dense layers and a flattening output layer,
            // all using a logistic activation with slope 1.
            net.AddLayer(new DenseLayer(15, activation: Activation.Logistic(1)));
            net.AddLayer(new DenseLayer(15, activation: Activation.Logistic(1)));
            net.AddLayer(new DenseLayer(15, activation: Activation.Logistic(1)));
            net.AddLayer(new FlattenLayer(2, activation: Activation.Logistic(1)));

            net._Build();
            net.RandomizeParameters(0); // fixed seed => reproducible test runs

            var trainer = new ML.DeepMethods.Algorithms.BackpropAlgorithm(net);

            trainer.EpochCount   = 6000;
            trainer.LearningRate = 0.01D;
            trainer.BatchSize    = 1;
            trainer.LossFunction = Loss.Euclidean;

            var epochIndex = 0;

            trainer.EpochEndedEvent += (sender, args) =>
            {
                // Report only once every 300 epochs.
                var current = epochIndex++;
                if (current % 300 == 0)
                {
                    Console.WriteLine("----------------Epoch #: {0}", epochIndex);
                    Console.WriteLine("L:\t{0}", trainer.LossValue);
                    Console.WriteLine("DL:\t{0}", trainer.LossDelta);
                    Console.WriteLine("DW:\t{0}", trainer.Step2);
                }
            };

            return trainer;
        }