Example #1
        public void NN_backpropagation_2L_gnb_all_training_samples()
        {
            initData_dataset_gaussian_naive_bayes_jason_example();
            Build2LBackPropagation build =
                new Build2LBackPropagation();

            build.SetParameters(1);

            ModelBackPropagationBase model =
                (ModelBackPropagationBase)build.BuildModel(
                    _trainingData, _attributeHeaders,
                    _indexTargetAttribute);

            int count = 0;

            for (int row = 0; row < 10; row++)
            {
                double[] data  = GetSingleTrainingRowDataForTest(row);
                double   value = model.RunModelForSingleData(data);

                if (SupportFunctions.DoubleCompare(value,
                                                   _trainingData[_indexTargetAttribute][row]))
                {
                    count++;
                }
            }

            Assert.AreEqual(10, count);
        }
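
The tests on this page call two fixture helpers that are not shown here: GetSingleTrainingRowDataForTest and SupportFunctions.DoubleCompare. The following is a minimal sketch of what they presumably do, assuming the attribute-major layout _trainingData[attribute][row] implied by the indexing in the loop above; the actual code in the test project and in Dasmic.MLLib may differ.

using System;

// Hypothetical sketch of the fixture helpers used by the tests on this page;
// the real implementations in the test project may differ.
internal class FixtureHelpersSketch
{
    // Assumed layout: _trainingData[attribute][row], matching the
    // _trainingData[_indexTargetAttribute][row] indexing in the assertions above.
    private double[][] _trainingData;
    private int _indexTargetAttribute;

    // Returns the non-target attribute values of one training row.
    public double[] GetSingleTrainingRowDataForTest(int row)
    {
        double[] data = new double[_trainingData.Length - 1];
        int idx = 0;
        for (int attr = 0; attr < _trainingData.Length; attr++)
        {
            if (attr == _indexTargetAttribute) continue;
            data[idx++] = _trainingData[attr][row];
        }
        return data;
    }

    // Tolerance-based equality; the tolerance used by the library's
    // SupportFunctions.DoubleCompare is an assumption here.
    public static bool DoubleCompare(double a, double b, double epsilon = 1e-9)
    {
        return Math.Abs(a - b) < epsilon;
    }
}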
Example #2
        public void NN_backpropagation_generic_std_3L_gnb_all_training_samples()
        {
            initData_dataset_gaussian_naive_bayes_jason_example();
            BuildGenericBackPropagationStandard build =
                new BuildGenericBackPropagationStandard();

            build.SetParameters(1, 2, .5, 1500);
            //build.SetNumberOfHiddenLayers(2);
            build.AddHiddenLayer(0, 2, new Sigmoid());
            build.AddHiddenLayer(1, 2, new Sigmoid());
            build.SetOutputLayerActivationFunction(new Sigmoid());

            ModelBackPropagationBase model =
                (ModelBackPropagationBase)build.BuildModel(
                    _trainingData, _attributeHeaders,
                    _indexTargetAttribute);

            int count = 0;

            for (int row = 0; row < 10; row++)
            {
                double[] data  = GetSingleTrainingRowDataForTest(row);
                double   value = model.RunModelForSingleData(data);

                if (SupportFunctions.DoubleCompare(value,
                                                   _trainingData[_indexTargetAttribute][row]))
                {
                    count++;
                }
            }
            // Weights are randomly initialized, so only a loose lower bound on accuracy is asserted
            Assert.IsTrue(count >= 5);
        }
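
Example #2 uses BuildGenericBackPropagationStandard with SetParameters(1, 2, .5, 1500), which appears to supply, among other things, a learning rate of 0.5 and 1500 training epochs (the exact parameter order is an assumption). For context, the sketch below shows the textbook "standard" backpropagation update for a sigmoid output neuron; it illustrates the rule a fixed learning rate drives, not the Dasmic.MLLib implementation.

using System;

// Library-independent sketch of the core standard-backpropagation update.
internal static class BackPropSketch
{
    // Delta for an output neuron with sigmoid activation and squared error.
    public static double OutputDelta(double output, double target)
    {
        return (output - target) * output * (1.0 - output);
    }

    // Gradient-descent step applied each epoch: w <- w - learningRate * delta * input
    public static double UpdateWeight(double weight, double delta,
                                      double input, double learningRate)
    {
        return weight - learningRate * delta * input;
    }
}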
Example #3
        public void NN_backpropagation_2L_nb_custom_activation_SigSig_all_training_samples()
        {
            initData_dataset_naive_bayes_jason_example();
            Build2LBackPropagation build =
                new Build2LBackPropagation();

            build.SetParameters(1);

            build.SetActivationFunction(0, new Dasmic.MLLib.Algorithms.NeuralNetwork.Support.ActivationFunction.Sigmoid());
            build.SetActivationFunction(1, new Dasmic.MLLib.Algorithms.NeuralNetwork.Support.ActivationFunction.Sigmoid());

            ModelBackPropagationBase model =
                (ModelBackPropagationBase)build.BuildModel(
                    _trainingData, _attributeHeaders,
                    _indexTargetAttribute);

            int count = 0;

            for (int row = 0; row < 10; row++)
            {
                double[] data  = GetSingleTrainingRowDataForTest(row);
                double   value = model.RunModelForSingleData(data);

                if (SupportFunctions.DoubleCompare(value,
                                                   _trainingData[_indexTargetAttribute][row]))
                {
                    count++;
                }
            }

            // Weights are randomly initialized, so a range rather than an exact count is asserted
            Assert.IsTrue(count >= 8 && count <= 10);
        }
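
Example #3 explicitly assigns a Sigmoid activation to both layers via the fully qualified Dasmic.MLLib.Algorithms.NeuralNetwork.Support.ActivationFunction.Sigmoid type. The sketch below shows what such an activation computes mathematically (the logistic function and the derivative backpropagation needs); the class interface here is hypothetical, not necessarily the one the library defines.

using System;

// Minimal sketch of a sigmoid (logistic) activation function.
public class SigmoidSketch
{
    // f(x) = 1 / (1 + e^-x), squashes any input into (0, 1)
    public double Evaluate(double x)
    {
        return 1.0 / (1.0 + Math.Exp(-x));
    }

    // f'(x) = f(x) * (1 - f(x)), expressed in terms of the output value
    public double DerivativeFromOutput(double y)
    {
        return y * (1.0 - y);
    }
}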
Example #4
        public void NN_backpropagation_generic_std_one_hidden_pythagoras_single_data_0()
        {
            Init_dataset_pythagoras();
            BuildGenericBackPropagationStandard build =
                new BuildGenericBackPropagationStandard();

            build.SetParameters(0, 1, .001, 2500, .001);
            //build.SetNumberOfHiddenLayers(1);
            build.AddHiddenLayer(0, 2, new Sigmoid());
            build.SetOutputLayerActivationFunction(new Linear());

            ModelBackPropagationBase model =
                (ModelBackPropagationBase)build.BuildModel(
                    _trainingData, _attributeHeaders,
                    _indexTargetAttribute);

            /*
             * model.SetWeight(1, 0, 0, .2196);
             * model.SetWeight(1, 1, 0, .121);
             * model.SetWeight(1, 2, 0, -4.18);
             *
             * model.SetWeight(1, 0, 1, .15367);
             * model.SetWeight(1, 1, 1, .2216);
             * model.SetWeight(1, 2, 1, -.99404);
             *
             * model.SetWeight(2, 0, 0, 15.43);
             * model.SetWeight(2, 1, 0, 12.92);
             * model.SetWeight(2, 2, 0, -3.33);*/

            System.Diagnostics.Debug.WriteLine("Weight[1][0][0]:" + model.GetWeight(1, 0, 0));
            System.Diagnostics.Debug.WriteLine("Weight[1][1][0]:" + model.GetWeight(1, 1, 0));
            System.Diagnostics.Debug.WriteLine("Weight[1][2][0]:" + model.GetWeight(1, 2, 0));

            System.Diagnostics.Debug.WriteLine("Weight[1][0][1]:" + model.GetWeight(1, 0, 1));
            System.Diagnostics.Debug.WriteLine("Weight[1][1][1]:" + model.GetWeight(1, 1, 1));
            System.Diagnostics.Debug.WriteLine("Weight[1][2][1]:" + model.GetWeight(1, 2, 1));

            System.Diagnostics.Debug.WriteLine("Weight[2][0][1]:" + model.GetWeight(2, 0, 0));
            System.Diagnostics.Debug.WriteLine("Weight[2][1][1]:" + model.GetWeight(2, 1, 0));
            System.Diagnostics.Debug.WriteLine("Weight[2][2][1]:" + model.GetWeight(2, 2, 0));

            int row = 0;

            double[] data  = GetSingleTrainingRowDataForTest(row);
            double   value = model.RunModelForSingleData(data);

            System.Diagnostics.Debug.WriteLine("Final value:" + value);
            // The true hypotenuse for this row is approximately 1.41 (sqrt(2)); a loose range is asserted
            Assert.IsTrue(value > 1.0 && value < 3.0);
        }
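
The Pythagoras examples regress the hypotenuse from the two legs, which is why row 0 is expected to come out near 1.41 (sqrt(2)) and why the output layer uses Linear rather than Sigmoid. Init_dataset_pythagoras is not shown on this page; a plausible fixture, assuming the attribute-major layout used elsewhere, could look like the sketch below (the actual rows in the test project may differ).

using System;

// Hypothetical fixture for the Pythagoras examples.
// Layout assumption: TrainingData[attribute][row], target = hypotenuse.
internal class PythagorasFixtureSketch
{
    public double[][] TrainingData;
    public string[] AttributeHeaders = { "a", "b", "c" };
    public int IndexTargetAttribute = 2;

    public void InitDatasetPythagoras()
    {
        double[] a = { 1, 2, 3, 4, 5 };
        double[] b = { 1, 2, 4, 3, 12 };
        double[] c = new double[a.Length];
        for (int i = 0; i < a.Length; i++)
            c[i] = Math.Sqrt(a[i] * a[i] + b[i] * b[i]);   // e.g. row 0 -> sqrt(2) ≈ 1.41

        TrainingData = new[] { a, b, c };
    }
}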
Example #5
        public void NN_backpropagation_generic_rprop_one_hidden_pythagoras_rmse_data_1()
        {
            Init_dataset_pythagoras();
            BuildGenericBackPropagationRprop build =
                new BuildGenericBackPropagationRprop();

            build.SetParameters(0, 1, .02, 20000, .1);//,.005,2000);
            build.AddHiddenLayer(0, 2, new Sigmoid());
            build.SetOutputLayerActivationFunction(new Linear());

            ModelBackPropagationBase model =
                (ModelBackPropagationBase)build.BuildModel(
                    _trainingData, _attributeHeaders,
                    _indexTargetAttribute);

            /*
             * model.SetWeight(1, 0, 0, .53);
             * model.SetWeight(1, 1, 0, .53);
             * model.SetWeight(1, 2, 0, .53);
             *
             * model.SetWeight(1, 0, 1, .53);
             * model.SetWeight(1, 1, 1, .53);
             * model.SetWeight(1, 2, 1, .53);
             *
             * model.SetWeight(2, 0, 0, .53);
             * model.SetWeight(2, 1, 0, .53);
             * model.SetWeight(2, 2, 0, .53);
             */

            System.Diagnostics.Debug.WriteLine("Weight[1][0][0]:" + model.GetWeight(1, 0, 0));
            System.Diagnostics.Debug.WriteLine("Weight[1][1][0]:" + model.GetWeight(1, 1, 0));
            System.Diagnostics.Debug.WriteLine("Weight[1][2][0]:" + model.GetWeight(1, 2, 0));

            System.Diagnostics.Debug.WriteLine("Weight[1][0][1]:" + model.GetWeight(1, 0, 1));
            System.Diagnostics.Debug.WriteLine("Weight[1][1][1]:" + model.GetWeight(1, 1, 1));
            System.Diagnostics.Debug.WriteLine("Weight[1][2][1]:" + model.GetWeight(1, 2, 1));

            System.Diagnostics.Debug.WriteLine("Weight[2][0][1]:" + model.GetWeight(2, 0, 0));
            System.Diagnostics.Debug.WriteLine("Weight[2][1][1]:" + model.GetWeight(2, 1, 0));
            System.Diagnostics.Debug.WriteLine("Weight[2][2][1]:" + model.GetWeight(2, 2, 0));

            int row = 0;

            double[] data  = GetSingleTrainingRowDataForTest(row);
            double   value = model.RunModelForSingleData(data);

            Assert.IsTrue(value <= 1.69);
        }
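
Example #5 switches to BuildGenericBackPropagationRprop. Resilient propagation adapts a per-weight step size from the sign of successive gradients instead of using a fixed learning rate, which is why it tolerates the much larger epoch count here. Below is a minimal sketch of the classic Rprop- rule for one weight; it illustrates the general algorithm, not the exact variant Dasmic.MLLib implements.

using System;

// Sketch of the classic Rprop- update for a single weight. Per-weight state
// (step size and previous gradient) is carried across epochs.
internal class RpropWeightSketch
{
    private double _stepSize = 0.1;          // initial step size (delta)
    private double _previousGradient = 0.0;
    private const double EtaPlus = 1.2, EtaMinus = 0.5;
    private const double DeltaMax = 50.0, DeltaMin = 1e-6;

    public double Update(double weight, double gradient)
    {
        int sign = Math.Sign(gradient * _previousGradient);
        if (sign > 0)
        {
            // Gradient kept its sign: grow the step size.
            _stepSize = Math.Min(_stepSize * EtaPlus, DeltaMax);
        }
        else if (sign < 0)
        {
            // Sign flip (overshoot): shrink the step size and skip this update.
            _stepSize = Math.Max(_stepSize * EtaMinus, DeltaMin);
            gradient = 0.0;
        }

        _previousGradient = gradient;
        return weight - Math.Sign(gradient) * _stepSize;  // move against the gradient
    }
}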
Example #6
        public void NN_backpropagation_2L_gnb_single_training_sample_class_0()
        {
            initData_dataset_gaussian_naive_bayes_jason_example();
            Build2LBackPropagation build =
                new Build2LBackPropagation();

            build.SetParameters(1);
            ModelBackPropagationBase model =
                (ModelBackPropagationBase)build.BuildModel(
                    _trainingData, _attributeHeaders,
                    _indexTargetAttribute);

            int row = 0;

            double[] data  = GetSingleTrainingRowDataForTest(row);
            double   value = model.RunModelForSingleData(data);

            Assert.AreEqual(_trainingData[_indexTargetAttribute][row],
                            value);
        }
Example #7
        public void NN_backpropagation_generic_rprop_gnb_single_training_sample_class_0()
        {
            initData_dataset_gaussian_naive_bayes_jason_example();
            BuildGenericBackPropagationRprop build =
                new BuildGenericBackPropagationRprop();

            build.SetParameters(1, 1);
            build.AddHiddenLayer(0, 2, new Sigmoid());
            build.SetOutputLayerActivationFunction(new Sigmoid());

            ModelBackPropagationBase model =
                (ModelBackPropagationBase )build.BuildModel(
                    _trainingData, _attributeHeaders,
                    _indexTargetAttribute);

            int row = 0;

            double[] data  = GetSingleTrainingRowDataForTest(row);
            double   value = model.RunModelForSingleData(data);

            Assert.AreEqual(_trainingData[_indexTargetAttribute][row],
                            value);
        }
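
Examples #6 and #7 assert exact equality between RunModelForSingleData and the stored class label, which suggests the model maps the sigmoid output onto a discrete class value before returning it. How the library does this is not shown on this page; a typical scheme for a binary 0/1 target would be a simple 0.5 threshold, sketched below as an assumption rather than the library's actual behaviour.

// Assumed mapping from a sigmoid activation in (0, 1) to a binary class label;
// the rule inside ModelBackPropagationBase may differ.
static double ToClassLabel(double sigmoidOutput)
{
    return sigmoidOutput >= 0.5 ? 1.0 : 0.0;
}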