Code Example #1
        public void NN_backpropagation_2L_nb_custom_activation_SigLin_all_training_samples()
        {
            initData_dataset_naive_bayes_jason_example();
            Build2LBackPropagation build =
                new Build2LBackPropagation();

            build.SetParameters(1, .01, 2000, .01);

            build.SetActivationFunction(0, new Dasmic.MLLib.Algorithms.NeuralNetwork.Support.ActivationFunction.Sigmoid());
            build.SetActivationFunction(1, new Dasmic.MLLib.Algorithms.NeuralNetwork.Support.ActivationFunction.Linear());

            ModelBackPropagationBase model =
                (ModelBackPropagationBase)build.BuildModel(
                    _trainingData, _attributeHeaders,
                    _indexTargetAttribute);

            int count = 0;

            for (int row = 0; row < 10; row++)
            {
                double[] data  = GetSingleTrainingRowDataForTest(row);
                double   value = model.RunModelForSingleData(data);

                if (SupportFunctions.DoubleCompare(value,
                                                   _trainingData[_indexTargetAttribute][row]))
                {
                    count++;
                }
            }

            Assert.AreEqual(8, count);
        }
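This test fixes the activation functions explicitly (Sigmoid for index 0, Linear for index 1) and expects exactly 8 of the 10 training rows to be reproduced. Code Example #2 below runs the same dataset with the builder defaults and, because the initial weights are random, accepts anywhere from 8 to 10 correct rows.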
Code Example #2
        public void NN_backpropagation_2L_nb_all_training_samples()
        {
            initData_dataset_naive_bayes_jason_example();
            Build2LBackPropagation build =
                new Build2LBackPropagation();

            build.SetParameters(1);

            ModelBackPropagationBase model =
                (ModelBackPropagationBase)build.BuildModel(
                    _trainingData, _attributeHeaders,
                    _indexTargetAttribute);

            int count = 0;

            for (int row = 0; row < 10; row++)
            {
                double[] data  = GetSingleTrainingRowDataForTest(row);
                double   value = model.RunModelForSingleData(data);

                if (SupportFunctions.DoubleCompare(value,
                                                   _trainingData[_indexTargetAttribute][row]))
                {
                    count++;
                }
            }

            //Due to random weights
            Assert.IsTrue(count >= 8 && count <= 10);
        }
Code Example #3
        public void NN_backpropagation_generic_std_one_hidden_pythagoras_single_data_0()
        {
            Init_dataset_pythagoras();
            BuildGenericBackPropagationStandard build =
                new BuildGenericBackPropagationStandard();

            build.SetParameters(0, 1, .001, 2500, .001);
            //build.SetNumberOfHiddenLayers(1);
            build.AddHiddenLayer(0, 2, new Sigmoid());
            build.SetOutputLayerActivationFunction(new Linear());

            ModelBackPropagationBase model =
                (ModelBackPropagationBase)build.BuildModel(
                    _trainingData, _attributeHeaders,
                    _indexTargetAttribute);



            /*
             * model.SetWeight(1, 0, 0, .2196);
             * model.SetWeight(1, 1, 0, .121);
             * model.SetWeight(1, 2, 0, -4.18);
             *
             * model.SetWeight(1, 0, 1, .15367);
             * model.SetWeight(1, 1, 1, .2216);
             * model.SetWeight(1, 2, 1, -.99404);
             *
             * model.SetWeight(2, 0, 0, 15.43);
             * model.SetWeight(2, 1, 0, 12.92);
             * model.SetWeight(2, 2, 0, -3.33);*/

            System.Diagnostics.Debug.WriteLine("Weight[1][0][0]:" + model.GetWeight(1, 0, 0));
            System.Diagnostics.Debug.WriteLine("Weight[1][1][0]:" + model.GetWeight(1, 1, 0));
            System.Diagnostics.Debug.WriteLine("Weight[1][2][0]:" + model.GetWeight(1, 2, 0));

            System.Diagnostics.Debug.WriteLine("Weight[1][0][1]:" + model.GetWeight(1, 0, 1));
            System.Diagnostics.Debug.WriteLine("Weight[1][1][1]:" + model.GetWeight(1, 1, 1));
            System.Diagnostics.Debug.WriteLine("Weight[1][2][1]:" + model.GetWeight(1, 2, 1));

            System.Diagnostics.Debug.WriteLine("Weight[2][0][0]:" + model.GetWeight(2, 0, 0));
            System.Diagnostics.Debug.WriteLine("Weight[2][1][0]:" + model.GetWeight(2, 1, 0));
            System.Diagnostics.Debug.WriteLine("Weight[2][2][0]:" + model.GetWeight(2, 2, 0));

            int row = 0;

            double[] data  = GetSingleTrainingRowDataForTest(row);
            double   value = model.RunModelForSingleData(data);

            System.Diagnostics.Debug.WriteLine("Final value:" + value);
            //Expected answer is sqrt(2) ≈ 1.41
            Assert.IsTrue(value > 1.0 && value < 3.0);
        }
Code Example #4
        public void NN_backpropagation_generic_rprop_one_hidden_pythagoras_rmse_data_1()
        {
            Init_dataset_pythagoras();
            BuildGenericBackPropagationRprop build =
                new BuildGenericBackPropagationRprop();

            build.SetParameters(0, 1, .02, 20000, .1);//,.005,2000);
            build.AddHiddenLayer(0, 2, new Sigmoid());
            build.SetOutputLayerActivationFunction(new Linear());

            ModelBackPropagationBase model =
                (ModelBackPropagationBase)build.BuildModel(
                    _trainingData, _attributeHeaders,
                    _indexTargetAttribute);

            /*
             * model.SetWeight(1, 0, 0, .53);
             * model.SetWeight(1, 1, 0, .53);
             * model.SetWeight(1, 2, 0, .53);
             *
             * model.SetWeight(1, 0, 1, .53);
             * model.SetWeight(1, 1, 1, .53);
             * model.SetWeight(1, 2, 1, .53);
             *
             * model.SetWeight(2, 0, 0, .53);
             * model.SetWeight(2, 1, 0, .53);
             * model.SetWeight(2, 2, 0, .53);
             */

            System.Diagnostics.Debug.WriteLine("Weight[1][0][0]:" + model.GetWeight(1, 0, 0));
            System.Diagnostics.Debug.WriteLine("Weight[1][1][0]:" + model.GetWeight(1, 1, 0));
            System.Diagnostics.Debug.WriteLine("Weight[1][2][0]:" + model.GetWeight(1, 2, 0));

            System.Diagnostics.Debug.WriteLine("Weight[1][0][1]:" + model.GetWeight(1, 0, 1));
            System.Diagnostics.Debug.WriteLine("Weight[1][1][1]:" + model.GetWeight(1, 1, 1));
            System.Diagnostics.Debug.WriteLine("Weight[1][2][1]:" + model.GetWeight(1, 2, 1));

            System.Diagnostics.Debug.WriteLine("Weight[2][0][0]:" + model.GetWeight(2, 0, 0));
            System.Diagnostics.Debug.WriteLine("Weight[2][1][0]:" + model.GetWeight(2, 1, 0));
            System.Diagnostics.Debug.WriteLine("Weight[2][2][0]:" + model.GetWeight(2, 2, 0));

            int row = 0;

            double[] data  = GetSingleTrainingRowDataForTest(row);
            double   value = model.RunModelForSingleData(data);

            Assert.IsTrue(value <= 1.69);
        }
Code Example #5
        public void NN_backpropagation_generic_rprop_missing_layer_throws_exception()
        {
            initData_dataset_gaussian_naive_bayes_jason_example();
            BuildGenericBackPropagationRprop build =
                new BuildGenericBackPropagationRprop();

            build.SetParameters(1, 3, .5, 1500);
            build.AddHiddenLayer(0, 2, new Sigmoid());
            build.AddHiddenLayer(1, 2, new Sigmoid());
            build.SetOutputLayerActivationFunction(new Sigmoid());

            ModelBackPropagationBase model =
                (ModelBackPropagationBase)build.BuildModel(
                    _trainingData, _attributeHeaders,
                    _indexTargetAttribute);
        }
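As listed, this test never asserts that an exception is actually raised: the builder appears to be configured for three hidden layers (the second SetParameters argument) while only two are added, and the test name implies BuildModel should throw. In MSTest this is usually expressed with an [ExpectedException] attribute on the test method, which does not appear in this listing. A minimal explicit sketch of the same check, assuming only that BuildModel throws for the missing layer (the concrete exception type is not shown in the source):

            bool threw = false;
            try
            {
                build.BuildModel(_trainingData, _attributeHeaders,
                                 _indexTargetAttribute);
            }
            catch (System.Exception)
            {
                //Catch-all used purely for illustration; the real test would
                //name the specific exception type thrown by BuildModel
                threw = true;
            }
            Assert.IsTrue(threw, "BuildModel should throw when a declared hidden layer is missing");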
Code Example #6
        //Returns true if any checked layer of the model is missing (null).
        //Note: Parallel.For's upper bound is exclusive, so the layer at the
        //highest index is not checked here.
        protected bool VerifyLayers(ModelBackPropagationBase model)
        {
            bool exceptionFlag = false;

            Parallel.For(0, model.GetNumberOfLayers() - 1,
                         new ParallelOptions { MaxDegreeOfParallelism = _maxParallelThreads },
                         idxLayer =>
            {
                if (model.GetLayer(idxLayer) == null)
                {
                    //The flag is only ever set to true, so the unsynchronized
                    //write from parallel iterations is safe
                    exceptionFlag = true;
                }
            });
            return exceptionFlag;
        }
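VerifyLayers is a helper rather than a test: it reports true when one of the checked layers is null. A hypothetical usage sketch inside one of the tests above (reusing the fields and builder already shown in this listing) might be:

            ModelBackPropagationBase model =
                (ModelBackPropagationBase)build.BuildModel(
                    _trainingData, _attributeHeaders,
                    _indexTargetAttribute);

            //VerifyLayers returns true if a null layer was found
            Assert.IsFalse(VerifyLayers(model));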
Code Example #7
        public void Deep_NN_generic_jason_simple_rmse()
        {
            Init_dataset_jason_linear_regression();
            BuildGenericDeepNN build =
                new BuildGenericDeepNN();

            build.SetParameters(0, 2);
            //Use Default Parameters
            ModelBackPropagationBase model =
                (ModelBackPropagationBase)build.BuildModel(
                    _trainingData, _attributeHeaders,
                    _indexTargetAttribute);

            double value = model.GetModelRMSE(_trainingData);

            Assert.IsTrue(value < .61 && value > 0);
        }
Code Example #8
        public void NN_backpropagation_generic_rprop_no_hidden_pythagoras_rmse()
        {
            Init_dataset_pythagoras();
            BuildGenericBackPropagationRprop build =
                new BuildGenericBackPropagationRprop();

            build.SetParameters(0, 0, .3, 1);
            build.SetOutputLayerActivationFunction(new Linear());

            ModelBackPropagationBase model =
                (ModelBackPropagationBase)build.BuildModel(
                    _trainingData, _attributeHeaders,
                    _indexTargetAttribute);

            double value = model.GetModelRMSE(_trainingData);

            Assert.IsTrue(value < 10);
        }
Code Example #9
        public void NN_backpropagation_generic_rprop_jason_simple_rmse()
        {
            Init_dataset_jason_linear_regression();
            BuildGenericBackPropagationRprop build =
                new BuildGenericBackPropagationRprop();

            build.SetParameters(0, 1, .01, 10000);
            build.AddHiddenLayer(0, 2, new Sigmoid());
            build.SetOutputLayerActivationFunction(new Linear());

            ModelBackPropagationBase model =
                (ModelBackPropagationBase)build.BuildModel(
                    _trainingData, _attributeHeaders,
                    _indexTargetAttribute);

            double value = model.GetModelRMSE(_trainingData);

            Assert.IsTrue(value < .61 && value > 0);
        }
Code Example #10
        public void NN_backpropagation_2L_jason_simple_rmse()
        {
            Init_dataset_jason_linear_regression();
            Build2LBackPropagation build =
                new Build2LBackPropagation();

            build.SetParameters(0, .01, 3000, .05);
            build.SetActivationFunction(0, new Dasmic.MLLib.Algorithms.NeuralNetwork.Support.ActivationFunction.Sigmoid());
            build.SetActivationFunction(1, new Dasmic.MLLib.Algorithms.NeuralNetwork.Support.ActivationFunction.Linear());

            ModelBackPropagationBase model =
                (ModelBackPropagationBase)build.BuildModel(
                    _trainingData, _attributeHeaders,
                    _indexTargetAttribute);

            double value = model.GetModelRMSE(_trainingData);

            Assert.IsTrue(value < 1.0);
        }
Code Example #11
        public void NN_backpropagation_generic_std_no_hidden_pythagoras_rmse()
        {
            Init_dataset_pythagoras();
            BuildGenericBackPropagationStandard build =
                new BuildGenericBackPropagationStandard();

            build.SetParameters(0, 0, .3, 1);
            //build.SetNumberOfHiddenLayers(0);
            //build.AddHiddenLayer(0, 2, new Sigmoid());
            build.SetOutputLayerActivationFunction(new Linear());

            ModelBackPropagationBase model =
                (ModelBackPropagationBase)build.BuildModel(
                    _trainingData, _attributeHeaders,
                    _indexTargetAttribute);

            double value = model.GetModelRMSE(_trainingData);

            Assert.IsTrue(value > 100000);
        }
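The assertion here is inverted relative to the other RMSE tests: it expects the error to stay very large, presumably documenting that standard backpropagation with these parameters and no hidden layer fails on the Pythagoras data, whereas the Rprop variant in Code Example #8 with the same layer setup only requires an RMSE below 10.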
Code Example #12
        public void NN_backpropagation_2L_gnb_single_training_sample_class_0()
        {
            initData_dataset_gaussian_naive_bayes_jason_example();
            Build2LBackPropagation build =
                new Build2LBackPropagation();

            build.SetParameters(1);
            ModelBackPropagationBase model =
                (ModelBackPropagationBase)build.BuildModel(
                    _trainingData, _attributeHeaders,
                    _indexTargetAttribute);

            int row = 0;

            double[] data  = GetSingleTrainingRowDataForTest(row);
            double   value = model.RunModelForSingleData(data);

            Assert.AreEqual(_trainingData[_indexTargetAttribute][row],
                            value);
        }
Code Example #13
        public void NN_backpropagation_generic_rprop_gnb_single_training_sample_class_0()
        {
            initData_dataset_gaussian_naive_bayes_jason_example();
            BuildGenericBackPropagationRprop build =
                new BuildGenericBackPropagationRprop();

            build.SetParameters(1, 1);
            build.AddHiddenLayer(0, 2, new Sigmoid());
            build.SetOutputLayerActivationFunction(new Sigmoid());

            ModelBackPropagationBase model =
                (ModelBackPropagationBase)build.BuildModel(
                    _trainingData, _attributeHeaders,
                    _indexTargetAttribute);

            int row = 0;

            double[] data  = GetSingleTrainingRowDataForTest(row);
            double   value = model.RunModelForSingleData(data);

            Assert.AreEqual(_trainingData[_indexTargetAttribute][row],
                            value);
        }