Example #1
        public void NN_backpropagation_generic_std_3L_gnb_all_training_samples()
        {
            initData_dataset_gaussian_naive_bayes_jason_example();
            BuildGenericBackPropagationStandard build =
                new BuildGenericBackPropagationStandard();

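            // The second argument matches the number of hidden layers added below;
            // .5 and 1500 appear to be the learning rate and training iterations (inferred from the other examples, not documented).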
            build.SetParameters(1, 2, .5, 1500);
            //build.SetNumberOfHiddenLayers(2);
            build.AddHiddenLayer(0, 2, new Sigmoid());
            build.AddHiddenLayer(1, 2, new Sigmoid());
            build.SetOutputLayerActivationFunction(new Sigmoid());

            ModelBackPropagationBase model =
                (ModelBackPropagationBase)build.BuildModel(
                    _trainingData, _attributeHeaders,
                    _indexTargetAttribute);

            int count = 0;

            for (int row = 0; row < 10; row++)
            {
                double[] data  = GetSingleTrainingRowDataForTest(row);
                double   value = model.RunModelForSingleData(data);

                if (SupportFunctions.DoubleCompare(value,
                                                   _trainingData[_indexTargetAttribute][row]))
                {
                    count++;
                }
            }
            // Random weight initialization makes results nondeterministic, so only require at least half of the rows to be classified correctly.
            Assert.IsTrue(count >= 5);
        }
Example #2
        public void NN_backpropagation_generic_std_invalid_layer_throws_exception()
        {
            initData_dataset_gaussian_naive_bayes_jason_example();
            BuildGenericBackPropagationStandard build =
                new BuildGenericBackPropagationStandard();

            build.SetParameters(1, 1, .5, 1500);
            build.AddHiddenLayer(0, 2, new Sigmoid());
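            // Expected to throw: SetParameters declared only one hidden layer, so layer index 1 is invalid.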
            build.AddHiddenLayer(1, 2, new Sigmoid());
            build.SetOutputLayerActivationFunction(new Sigmoid());
        }
Example #3
        public void NN_backpropagation_generic_std_missing_layer_throws_exception()
        {
            initData_dataset_gaussian_naive_bayes_jason_example();
            BuildGenericBackPropagationStandard build =
                new BuildGenericBackPropagationStandard();

            build.SetParameters(1, 3, .5, 1500);
            //build.SetNumberOfHiddenLayers(3);
            build.AddHiddenLayer(0, 2, new Sigmoid());
            build.AddHiddenLayer(1, 2, new Sigmoid());
            build.SetOutputLayerActivationFunction(new Sigmoid());

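            // Expected to throw: three hidden layers were declared but only two were added.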
            ModelBackPropagationBase model =
                (ModelBackPropagationBase)build.BuildModel(
                    _trainingData, _attributeHeaders,
                    _indexTargetAttribute);
        }
Example #4
        public void NN_backpropagation_generic_std_one_hidden_pythagoras_single_data_0()
        {
            Init_dataset_pythagoras();
            BuildGenericBackPropagationStandard build =
                new BuildGenericBackPropagationStandard();

            build.SetParameters(0, 1, .001, 2500, .001);
            //build.SetNumberOfHiddenLayers(1);
            build.AddHiddenLayer(0, 2, new Sigmoid());
            build.SetOutputLayerActivationFunction(new Linear());

            ModelBackPropagationBase model =
                (ModelBackPropagationBase)build.BuildModel(
                    _trainingData, _attributeHeaders,
                    _indexTargetAttribute);

            /*
             * model.SetWeight(1, 0, 0, .2196);
             * model.SetWeight(1, 1, 0, .121);
             * model.SetWeight(1, 2, 0, -4.18);
             *
             * model.SetWeight(1, 0, 1, .15367);
             * model.SetWeight(1, 1, 1, .2216);
             * model.SetWeight(1, 2, 1, -.99404);
             *
             * model.SetWeight(2, 0, 0, 15.43);
             * model.SetWeight(2, 1, 0, 12.92);
             * model.SetWeight(2, 2, 0, -3.33);*/

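            // Dump the trained weights for manual inspection.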
            System.Diagnostics.Debug.WriteLine("Weight[1][0][0]:" + model.GetWeight(1, 0, 0));
            System.Diagnostics.Debug.WriteLine("Weight[1][1][0]:" + model.GetWeight(1, 1, 0));
            System.Diagnostics.Debug.WriteLine("Weight[1][2][0]:" + model.GetWeight(1, 2, 0));

            System.Diagnostics.Debug.WriteLine("Weight[1][0][1]:" + model.GetWeight(1, 0, 1));
            System.Diagnostics.Debug.WriteLine("Weight[1][1][1]:" + model.GetWeight(1, 1, 1));
            System.Diagnostics.Debug.WriteLine("Weight[1][2][1]:" + model.GetWeight(1, 2, 1));

            System.Diagnostics.Debug.WriteLine("Weight[2][0][0]:" + model.GetWeight(2, 0, 0));
            System.Diagnostics.Debug.WriteLine("Weight[2][1][0]:" + model.GetWeight(2, 1, 0));
            System.Diagnostics.Debug.WriteLine("Weight[2][2][0]:" + model.GetWeight(2, 2, 0));

            int row = 0;

            double[] data  = GetSingleTrainingRowDataForTest(row);
            double   value = model.RunModelForSingleData(data);

            System.Diagnostics.Debug.WriteLine("Final value:" + value);
            // The true answer is 1.41; accept a wide band because training starts from random weights.
            Assert.IsTrue(value > 1.0 && value < 3.0);
        }
Example #5
        public void NN_backpropagation_generic_std_one_hidden_pythagoras_rmse()
        {
            Init_dataset_pythagoras();
            BuildGenericBackPropagationStandard build =
                new BuildGenericBackPropagationStandard();

            build.SetParameters(0, 1, .3, 4000);
            //build.SetNumberOfHiddenLayers(1);
            build.AddHiddenLayer(0, 2, new Sigmoid());
            build.SetOutputLayerActivationFunction(new Linear());

            ModelBackPropagationBase model =
                (ModelBackPropagationBase)build.BuildModel(
                    _trainingData, _attributeHeaders,
                    _indexTargetAttribute);

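            // The trained model's RMSE over the training data should fall within a loose bound.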
            double value = model.GetModelRMSE(_trainingData);

            Assert.IsTrue(value < 10.00);
        }
Example #6
        public void NN_backpropagation_generic_std_gnb_single_training_sample_class_0()
        {
            initData_dataset_gaussian_naive_bayes_jason_example();
            BuildGenericBackPropagationStandard build =
                new BuildGenericBackPropagationStandard();

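            // Two-argument overload; presumably falls back to default learning rate and iteration count.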
            build.SetParameters(1, 1);

            build.AddHiddenLayer(0, 2, new Sigmoid());
            build.SetOutputLayerActivationFunction(new Sigmoid());

            ModelBackPropagationBase model =
                (ModelBackPropagationBase)build.BuildModel(
                    _trainingData, _attributeHeaders,
                    _indexTargetAttribute);

            int row = 0;

            double[] data  = GetSingleTrainingRowDataForTest(row);
            double   value = model.RunModelForSingleData(data);

            Assert.AreEqual(_trainingData[_indexTargetAttribute][row],
                            value);
        }