Code Example #1
        static void TestANNMiner(Dataset dataset)
        {
            IClassificationMeasure measure        = new AccuracyMeasure();
            ILearningMethod        learningMethod = new BackPropagation(0.1, 10, 0.9, false);

            int hiddenUnitCount = dataset.Metadata.Attributes.Length * dataset.Metadata.Target.Length;

            IActivationFunction activationFunction = new SigmoidActivationFunction();

            ISolutionQualityEvaluator <ConnectionDC> evaluator  = new NNClassificationQualityEvaluator(measure, learningMethod, hiddenUnitCount, activationFunction);
            IHeuristicsCalculator <ConnectionDC>     calculator = new DefaultHeuristicCalculator <ConnectionDC>();
            ILocalSearch <ConnectionDC>          localSearch    = new DefaultRemovalLocalSearch <ConnectionDC>(evaluator);
            IComponentInvalidator <ConnectionDC> invalidator    = new NNConnectorInvalidator();

            Problem <ConnectionDC> problem = new Problem <ConnectionDC>(invalidator, calculator, evaluator, localSearch);

            NeuralNetwork network_before = null;
            NeuralNetwork network_final  = SingleTest.CreateNeuralNet_ANNMiner(problem, hiddenUnitCount, true, false, dataset, ref network_before);

            double quality_before = SingleTest.TestClassifier(network_before, dataset, measure);
            double quality_final  = SingleTest.TestClassifier(network_final, dataset, measure);

            Console.WriteLine("ANN - before: " + quality_before);
            Console.WriteLine("ANN - final: " + quality_final);
        }
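
All of these examples funnel through the same idea: a classification measure scores a classifier's predictions against the true labels and returns a scalar quality. As a point of reference, here is a minimal, self-contained sketch of the accuracy computation itself; the method name and signature are illustrative only and are not iFourmi's actual AccuracyMeasure API.

        // Hedged sketch: standalone accuracy computation (illustrative, not the library's code).
        static double ComputeAccuracy(int[] actual, int[] predicted)
        {
            if (actual.Length == 0 || actual.Length != predicted.Length)
                throw new ArgumentException("Label arrays must be non-empty and of equal length.");

            int correct = 0;
            for (int i = 0; i < actual.Length; i++)
            {
                if (actual[i] == predicted[i])
                    correct++;                          // count exact matches
            }
            return (double)correct / actual.Length;     // fraction correctly classified
        }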
Code Example #2
File: Program.cs Project: skn123/iFourmi
        public static void Test2()
        {
            AccuracyMeasure accuracyMeasure = new AccuracyMeasure();
            IEnsembleClassificationStrategy majorityVote = new MajorityVoteStrategy();
            IEnsembleClassificationStrategy weightedVote = new WeightedVoteStrategy();


            DataMining.Data.Dataset[] tables      = BatchTest2.LoadTrainingAndTestingData("audiology", 0);
            DataMining.Data.Dataset   trainingSet = tables[0];
            DataMining.Data.Dataset   testingSet  = tables[1];

            //EnsembleClassifier ensemble = SingleTest.CreateGKAntIBMinerClassifier_ClassBaseWeights_Ensemble(trainingSet);

            GaussianKernelEstimator GCE = new GaussianKernelEstimator(-0.5, new DefaultDistanceMeasure(1), trainingSet);



            double quality = 0;

            quality = SingleTest.TestClassifier(GCE, testingSet, accuracyMeasure);



            Console.WriteLine("Accuracy=" + quality);

            Console.WriteLine("-------------------------------------------");
            Console.WriteLine("-------------------------------------------");
            Console.WriteLine("-------------------------------------------");
        }
Code Example #3
    public static void Main(string[] argv)
    {
        modshogun.init_shogun_with_defaults();

        double[] ground_truth = Load.load_labels("../data/label_train_twoclass.dat");
        Random   RandomNumber = new Random();

        double[] predicted = new double[ground_truth.Length];
        for (int i = 0; i < ground_truth.Length; i++)
        {
            predicted[i] = RandomNumber.NextDouble();
        }

        BinaryLabels ground_truth_labels = new BinaryLabels(ground_truth);
        BinaryLabels predicted_labels    = new BinaryLabels(predicted);

        ContingencyTableEvaluation base_evaluator = new ContingencyTableEvaluation();

        base_evaluator.evaluate(predicted_labels, ground_truth_labels);

        AccuracyMeasure evaluator1 = new AccuracyMeasure();
        double          accuracy   = evaluator1.evaluate(predicted_labels, ground_truth_labels);

        ErrorRateMeasure evaluator2 = new ErrorRateMeasure();
        double           errorrate  = evaluator2.evaluate(predicted_labels, ground_truth_labels);

        BALMeasure evaluator3 = new BALMeasure();
        double     bal        = evaluator3.evaluate(predicted_labels, ground_truth_labels);

        WRACCMeasure evaluator4 = new WRACCMeasure();
        double       wracc      = evaluator4.evaluate(predicted_labels, ground_truth_labels);

        F1Measure evaluator5 = new F1Measure();
        double    f1         = evaluator5.evaluate(predicted_labels, ground_truth_labels);

        CrossCorrelationMeasure evaluator6 = new CrossCorrelationMeasure();
        double crosscorrelation            = evaluator6.evaluate(predicted_labels, ground_truth_labels);

        RecallMeasure evaluator7 = new RecallMeasure();
        double        recall     = evaluator7.evaluate(predicted_labels, ground_truth_labels);

        PrecisionMeasure evaluator8 = new PrecisionMeasure();
        double           precision  = evaluator8.evaluate(predicted_labels, ground_truth_labels);

        SpecificityMeasure evaluator9  = new SpecificityMeasure();
        double             specificity = evaluator9.evaluate(predicted_labels, ground_truth_labels);

        Console.Write("{0}, {1}, {2}, {3}, {4}, {5}, {6}, {7}, {8}\n", accuracy, errorrate, bal, wracc, f1, crosscorrelation, recall, precision, specificity);

        modshogun.exit_shogun();
    }
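
Every measure evaluated above (accuracy, error rate, BAL, WRACC, F1, recall, precision, specificity) is derived from the binary contingency table that base_evaluator computes. The sketch below shows the standard formulas from raw true/false positive and negative counts; it illustrates the definitions and is not shogun's implementation.

    // Hedged sketch: standard contingency-table measures for binary classification.
    // TP, TN, FP, FN are assumed raw counts; the method name is illustrative only.
    static void PrintContingencyMeasures(double TP, double TN, double FP, double FN)
    {
        double total       = TP + TN + FP + FN;
        double accuracy    = (TP + TN) / total;
        double errorRate   = (FP + FN) / total;                // 1 - accuracy
        double recall      = TP / (TP + FN);                   // sensitivity / true positive rate
        double precision   = TP / (TP + FP);
        double specificity = TN / (TN + FP);
        double f1          = 2 * precision * recall / (precision + recall);
        double balanced    = 0.5 * (recall + specificity);     // BAL (balanced accuracy)

        Console.Write("{0}, {1}, {2}, {3}, {4}, {5}, {6}\n",
            accuracy, errorRate, balanced, f1, recall, precision, specificity);
    }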
Code Example #4
File: Program.cs Project: skn123/iFourmi
        public static void Test1()
        {
            AccuracyMeasure accuracyMeasure = new AccuracyMeasure();
            IEnsembleClassificationStrategy majorityVote = new MajorityVoteStrategy();
            IEnsembleClassificationStrategy weightedVote = new WeightedVoteStrategy();


            DataMining.Data.Dataset[] tables      = BatchTest2.LoadTrainingAndTestingData("audiology", 0);
            DataMining.Data.Dataset   trainingSet = tables[0];
            DataMining.Data.Dataset   testingSet  = tables[1];



            //EnsembleClassifier ensemble = SingleTest.CreateGKAntIBMinerClassifier_ClassBaseWeights_Ensemble(trainingSet);
            EnsembleClassifier ensemble = SingleTest.CreateNCCAntIBMinerClassifier_ClassBasedWeights_Ensemble(trainingSet);
            //EnsembleClassifier ensemble = SingleTest.CreateKNNAntIBMinerClassifier_ClassBasedWeights_Ensemble(trainingSet,false);


            double quality1 = 0;
            double quality2 = 0;
            double quality3 = 0;


            quality1 += SingleTest.TestClassifier(ensemble[0], testingSet, accuracyMeasure);

            ensemble.Stratgy = majorityVote;
            quality2        += SingleTest.TestClassifier(ensemble, testingSet, accuracyMeasure);

            ensemble.Stratgy = weightedVote;
            quality3        += SingleTest.TestClassifier(ensemble, testingSet, accuracyMeasure);



            quality1 = Math.Round((quality1 / 1) * 100, 2);
            quality2 = Math.Round((quality2 / 1) * 100, 2);
            quality3 = Math.Round((quality3 / 1) * 100, 2);

            //------------------------------------------------------------------
            Console.WriteLine("ACO-GKC-CB-CB: Accuracy=" + quality1);

            Console.WriteLine("ACO-GKC-CB-ens-MV: Accuracy=" + quality2);

            Console.WriteLine("ACO-GKC-CB-ens-WV: Accuracy=" + quality3);

            //Console.WriteLine(((KNearestNeighbours)ensemble[0]).KNeighbours.ToString());

            Console.WriteLine("-------------------------------------------");
            Console.WriteLine("-------------------------------------------");
            Console.WriteLine("-------------------------------------------");
        }
Code Example #5
File: BatchTest2.cs Project: skn123/iFourmi
        public static void RunACOIBL_WeightOutputs()
        {
            AccuracyMeasure accuracyMeasure = new AccuracyMeasure();


            foreach (string dataset in GetDatasetFolds("datasets.txt"))
            {
                //----------------------------------------
                Console.WriteLine("Data Table:" + dataset);
                //----------------------------------------

                #region ACO-KNN-CB

                //try
                {
                    for (_currentFold = 0; _currentFold < _folds; _currentFold++)
                    {
                        //----------------------------------------
                        Console.WriteLine("Fold:" + _currentFold.ToString());
                        //----------------------------------------

                        DataMining.Data.Dataset[] tables      = LoadTrainingAndTestingData(dataset, _currentFold);
                        DataMining.Data.Dataset   trainingSet = tables[0];
                        DataMining.Data.Dataset   testingSet  = tables[1];



                        KNearestNeighbours knnclassifier = SingleTest.CreateKNNAntIBMinerClassifier_ClassBasedWeights(trainingSet, false);
                        //------------------------------------------------------------------
                        Console.WriteLine("ACO-KNN-CB: " + dataset);
                        SaveWeights(trainingSet, knnclassifier);
                    }



                    Console.WriteLine("-------------------------------------------");
                    Console.WriteLine("-------------------------------------------");
                    Console.WriteLine("-------------------------------------------");
                }

                //catch (Exception ex)
                {
                    //LogError(ex);
                    //  Console.WriteLine(ex.Message);
                }

                #endregion
            }
        }
Code Example #6
    public static void Main(string[] argv)
    {
        modshogun.init_shogun_with_defaults();

        double[] ground_truth = Load.load_labels("../data/label_train_twoclass.dat");
        Random RandomNumber = new Random();
        double[] predicted = new double[ground_truth.Length];
        for (int i = 0; i < ground_truth.Length; i++) {
            predicted[i] = RandomNumber.NextDouble();
        }

        BinaryLabels ground_truth_labels = new BinaryLabels(ground_truth);
        BinaryLabels predicted_labels = new BinaryLabels(predicted);

        ContingencyTableEvaluation base_evaluator = new ContingencyTableEvaluation();
        base_evaluator.evaluate(predicted_labels,ground_truth_labels);

        AccuracyMeasure evaluator1 = new AccuracyMeasure();
        double accuracy = evaluator1.evaluate(predicted_labels,ground_truth_labels);

        ErrorRateMeasure evaluator2 = new ErrorRateMeasure();
        double errorrate = evaluator2.evaluate(predicted_labels,ground_truth_labels);

        BALMeasure evaluator3 = new BALMeasure();
        double bal = evaluator3.evaluate(predicted_labels,ground_truth_labels);

        WRACCMeasure evaluator4 = new WRACCMeasure();
        double wracc = evaluator4.evaluate(predicted_labels,ground_truth_labels);

        F1Measure evaluator5 = new F1Measure();
        double f1 = evaluator5.evaluate(predicted_labels,ground_truth_labels);

        CrossCorrelationMeasure evaluator6 = new CrossCorrelationMeasure();
        double crosscorrelation = evaluator6.evaluate(predicted_labels,ground_truth_labels);

        RecallMeasure evaluator7 = new RecallMeasure();
        double recall = evaluator7.evaluate(predicted_labels,ground_truth_labels);

        PrecisionMeasure evaluator8 = new PrecisionMeasure();
        double precision = evaluator8.evaluate(predicted_labels,ground_truth_labels);

        SpecificityMeasure evaluator9 = new SpecificityMeasure();
        double specificity = evaluator9.evaluate(predicted_labels,ground_truth_labels);

        Console.Write("{0}, {1}, {2}, {3}, {4}, {5}, {6}, {7}, {8}\n", accuracy, errorrate, bal, wracc, f1, crosscorrelation, recall, precision, specificity);

        modshogun.exit_shogun();
    }
Code Example #7
        static void TestBackPropagation(Dataset dataset)
        {
            IActivationFunction activationFunction = new SigmoidActivationFunction();
            //int hiddenUnitCount = dataset.Metadata.Attributes.Length * dataset.Metadata.Target.Length;

            int hiddenUnitCount = (dataset.Metadata.Attributes.Length + dataset.Metadata.Target.Length) / 2;

            NeuralNetwork network = SingleTest.CreateNeuralNet_BP(dataset, hiddenUnitCount, 0.9, 0.01, 1000, activationFunction);

            AccuracyMeasure measure = new AccuracyMeasure();

            double quality = SingleTest.TestClassifier(network, dataset, measure);

            Console.WriteLine(measure.ToString() + ":" + Math.Round(quality * 100, 2));
            Console.WriteLine("Size:" + network.Size);
        }
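
Note that the examples use several ad-hoc heuristics for sizing the hidden layer: the product of attribute and target counts (Example #1), their mean (here), and their sum (the batch tests further down). A small hedged helper makes the three options explicit; the method and its string parameter are illustrative only, not part of the library.

        // Hedged sketch: the hidden-layer sizing heuristics used informally across these examples.
        static int HiddenUnitCount(int attributeCount, int targetCount, string heuristic)
        {
            switch (heuristic)
            {
                case "product": return attributeCount * targetCount;        // Example #1
                case "mean":    return (attributeCount + targetCount) / 2;  // this example
                case "sum":     return attributeCount + targetCount;        // batch tests below
                default:        throw new ArgumentException("Unknown heuristic: " + heuristic);
            }
        }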
Code Example #8
    static void Main(string[] argv)
    {
        modshogun.init_shogun_with_defaults();

        // Note: this snippet uses shogun's legacy modular API (DoubleMatrix/Labels);
        // randn is assumed to be a helper returning a 1xN matrix of standard-normal values.
        DoubleMatrix ground_truth = Load.load_labels("../data/label_train_twoclass.dat");
        DoubleMatrix predicted = randn(1, ground_truth.Length);

        Labels ground_truth_labels = new Labels(ground_truth);
        Labels predicted_labels = new Labels(predicted);

        ContingencyTableEvaluation base_evaluator = new ContingencyTableEvaluation();
        base_evaluator.evaluate(predicted_labels,ground_truth_labels);

        AccuracyMeasure evaluator1 = new AccuracyMeasure();
        double accuracy = evaluator1.evaluate(predicted_labels,ground_truth_labels);

        ErrorRateMeasure evaluator2 = new ErrorRateMeasure();
        double errorrate = evaluator2.evaluate(predicted_labels,ground_truth_labels);

        BALMeasure evaluator3 = new BALMeasure();
        double bal = evaluator3.evaluate(predicted_labels,ground_truth_labels);

        WRACCMeasure evaluator4 = new WRACCMeasure();
        double wracc = evaluator4.evaluate(predicted_labels,ground_truth_labels);

        F1Measure evaluator5 = new F1Measure();
        double f1 = evaluator5.evaluate(predicted_labels,ground_truth_labels);

        CrossCorrelationMeasure evaluator6 = new CrossCorrelationMeasure();
        double crosscorrelation = evaluator6.evaluate(predicted_labels,ground_truth_labels);

        RecallMeasure evaluator7 = new RecallMeasure();
        double recall = evaluator7.evaluate(predicted_labels,ground_truth_labels);

        PrecisionMeasure evaluator8 = new PrecisionMeasure();
        double precision = evaluator8.evaluate(predicted_labels,ground_truth_labels);

        SpecificityMeasure evaluator9 = new SpecificityMeasure();
        double specificity = evaluator9.evaluate(predicted_labels,ground_truth_labels);

        Console.Write("{0:F}, {1:F}, {2:F}, {3:F}, {4:F}, {5:F}, {6:F}, {7:F}, {8:F}\n", accuracy, errorrate, bal, wracc, f1, crosscorrelation, recall, precision, specificity);

        modshogun.exit_shogun();
    }
Code Example #9
        public static EnsembleClassifier CreateNCCAntIBMinerClassifier_Ensemble(Dataset trainingSet)
        {
            int classCount      = trainingSet.Metadata.Target.Values.Length;
            int attributesCount = trainingSet.Metadata.Attributes.Length;

            int problemSize = attributesCount + 1;

            AccuracyMeasure                  measure         = new AccuracyMeasure();
            DefaultDistanceMeasure           distanceMeasure = new DefaultDistanceMeasure(2);
            NearestClassClassifier           ncc             = new NearestClassClassifier(distanceMeasure, trainingSet);
            IBClassificationQualityEvaluator evaluator       = new ContinuousACO.ProblemSpecifics.IBClassificationQualityEvaluator(ncc, measure);

            evaluator.LearningSet   = trainingSet;
            evaluator.ValidationSet = trainingSet;
            Problem <double> problem = new Problem <double>(null, null, evaluator, null);

            AntIBMiner         antminer = new AntIBMiner(maxIterations, colonySize, convergenceIterations, problem, problemSize, archive, q, segma, trainingSet);
            EnsembleClassifier aconcc   = antminer.CreateEnsembleClassifier();

            return(aconcc);
        }
Code Example #10
        public static EnsembleClassifier CreateKNNPSOIBMinerClassifier_ClassBasedWeights_Ensemble(Dataset trainingSet, bool useWeightedVote)
        {
            int classCount      = trainingSet.Metadata.Target.Values.Length;
            int attributesCount = trainingSet.Metadata.Attributes.Length;

            int problemSize = (attributesCount * classCount) + 1;

            AccuracyMeasure                  measure         = new AccuracyMeasure();
            DefaultDistanceMeasure           distanceMeasure = new DefaultDistanceMeasure(2);
            KNearestNeighbours               knn             = new KNearestNeighbours(distanceMeasure, trainingSet, useWeightedVote);
            IBClassificationQualityEvaluator evaluator       = new ContinuousACO.ProblemSpecifics.IBClassificationQualityEvaluator(knn, measure);

            evaluator.LearningSet   = trainingSet;
            evaluator.ValidationSet = trainingSet;

            PSOIB psoIB = new PSOIB(problemSize, archive, maxIterations / archive, convergenceIterations, evaluator);

            psoIB.OnPostSwarmIteration += OnPostColonyIteration;

            EnsembleClassifier psoknn = psoIB.CreateEnsembleClassifier();

            return(psoknn);
        }
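
The instance-based miner factories differ mainly in how many continuous weights they optimize: one weight per attribute plus one extra term for the plain classifiers (Example #9), or one weight per attribute per class plus the extra term for the ClassBasedWeights variants (Example #10 and later). A hedged helper capturing that sizing rule, with names that are illustrative only:

        // Hedged sketch: problem-size rule mirrored from the factory methods in this listing.
        static int ProblemSize(int attributeCount, int classCount, bool classBasedWeights)
        {
            // The "+ 1" matches the extra term used by every factory method in these examples.
            return classBasedWeights ? (attributeCount * classCount) + 1 : attributeCount + 1;
        }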
Code Example #11
        public static EnsembleClassifier CreateNCCPSOIBMinerClassifier_Ensemble(Dataset trainingSet)
        {
            int classCount      = trainingSet.Metadata.Target.Values.Length;
            int attributesCount = trainingSet.Metadata.Attributes.Length;

            int problemSize = attributesCount + 1;

            AccuracyMeasure                  measure         = new AccuracyMeasure();
            DefaultDistanceMeasure           distanceMeasure = new DefaultDistanceMeasure(2);
            NearestClassClassifier           ncc             = new NearestClassClassifier(distanceMeasure, trainingSet);
            IBClassificationQualityEvaluator evaluator       = new ContinuousACO.ProblemSpecifics.IBClassificationQualityEvaluator(ncc, measure);

            evaluator.LearningSet   = trainingSet;
            evaluator.ValidationSet = trainingSet;

            PSOIB psoIB = new PSOIB(problemSize, archive, maxIterations / archive, convergenceIterations, evaluator);

            psoIB.OnPostSwarmIteration += OnPostColonyIteration;

            EnsembleClassifier psoncc = psoIB.CreateEnsembleClassifier();

            return(psoncc);
        }
Code Example #12
        public static EnsembleClassifier CreateGKPSOIBMinerClassifier_ClassBaseWeights_Ensemble(Dataset trainingSet)
        {
            int classCount      = trainingSet.Metadata.Target.Values.Length;
            int attributesCount = trainingSet.Metadata.Attributes.Length;

            int problemSize = (attributesCount * classCount) + 1;

            AccuracyMeasure                  measure         = new AccuracyMeasure();
            DefaultDistanceMeasure           distanceMeasure = new DefaultDistanceMeasure(1);
            GaussianKernelEstimator          gke             = new GaussianKernelEstimator(1, distanceMeasure, trainingSet);
            IBClassificationQualityEvaluator evaluator       = new ContinuousACO.ProblemSpecifics.IBClassificationQualityEvaluator(gke, measure);

            evaluator.LearningSet   = trainingSet;
            evaluator.ValidationSet = trainingSet;

            PSOIB psoIB = new PSOIB(problemSize, archive, maxIterations / archive, convergenceIterations, evaluator);

            psoIB.OnPostSwarmIteration += OnPostColonyIteration;

            EnsembleClassifier psogke = psoIB.CreateEnsembleClassifier();

            return(psogke);
        }
Code Example #13
        public static EnsembleClassifier CreateGKAntIBMinerClassifier_ClassBaseWeights_Ensemble(Dataset trainingSet)
        {
            int classCount      = trainingSet.Metadata.Target.Values.Length;
            int attributesCount = trainingSet.Metadata.Attributes.Length;

            int problemSize = (attributesCount * classCount) + 1;

            AccuracyMeasure                  measure         = new AccuracyMeasure();
            DefaultDistanceMeasure           distanceMeasure = new DefaultDistanceMeasure(1);
            GaussianKernelEstimator          gke             = new GaussianKernelEstimator(0.5, distanceMeasure, trainingSet);
            IBClassificationQualityEvaluator evaluator       = new ContinuousACO.ProblemSpecifics.IBClassificationQualityEvaluator(gke, measure);

            evaluator.LearningSet   = trainingSet;
            evaluator.ValidationSet = trainingSet;
            Problem <double> problem = new Problem <double>(null, null, evaluator, null);

            AntIBMiner antminer = new AntIBMiner(maxIterations, colonySize, convergenceIterations, problem, problemSize, archive, q, segma, trainingSet);

            antminer.OnPostColonyIteration += OnPostColonyIteration;

            EnsembleClassifier acogke = antminer.CreateEnsembleClassifier();

            return(acogke);
        }
Code Example #14
        public static KNearestNeighbours CreateKNNAntIBMinerClassifier_ClassBasedWeights(Dataset trainingSet, bool useWeightedVote)
        {
            int classCount      = trainingSet.Metadata.Target.Values.Length;
            int attributesCount = trainingSet.Metadata.Attributes.Length;

            int problemSize = (attributesCount * classCount) + 1;

            AccuracyMeasure                  measure         = new AccuracyMeasure();
            DefaultDistanceMeasure           distanceMeasure = new DefaultDistanceMeasure(2);
            KNearestNeighbours               knn             = new KNearestNeighbours(distanceMeasure, trainingSet, useWeightedVote);
            IBClassificationQualityEvaluator evaluator       = new ContinuousACO.ProblemSpecifics.IBClassificationQualityEvaluator(knn, measure);

            evaluator.LearningSet   = trainingSet;
            evaluator.ValidationSet = trainingSet;
            Problem <double> problem = new Problem <double>(null, null, evaluator, null);

            AntIBMiner antminer = new AntIBMiner(maxIterations, colonySize, convergenceIterations, problem, problemSize, archive, q, segma, trainingSet);

            antminer.OnPostColonyIteration += OnPostColonyIteration;

            KNearestNeighbours acoknn = antminer.CreateClassifier() as KNearestNeighbours;

            return(acoknn);
        }
Code Example #15
        public static void RunANNMiner_QEM()
        {
            AccuracyMeasure testMeasure = new AccuracyMeasure();

            foreach (string dataset in GetDatasetFolds("datasets.txt"))
            {
                //----------------------------------------
                Console.WriteLine("Data Table:" + dataset);
                //----------------------------------------

                foreach (IClassificationMeasure measure in GetMeasures())
                {
                    for (_currentFold = 0; _currentFold < _folds; _currentFold++)
                    {
                        //----------------------------------------
                        Console.WriteLine(dataset + " - Fold:" + _currentFold.ToString() + " - " + measure.ToString());
                        //----------------------------------------

                        DataMining.Data.Dataset[] tables      = LoadTrainingAndTestingData(dataset, _currentFold);
                        DataMining.Data.Dataset   trainingSet = tables[0];
                        DataMining.Data.Dataset   testingSet  = tables[1];

                        double quality_before = 0.0;
                        double quality_final  = 0.0;

                        double size_before = 0.0;
                        double size_final  = 0.0;

                        ILearningMethod learningMethod = new BackPropagation(0.05, 10, 0.9, false);

                        int hiddenUnitCount = (trainingSet.Metadata.Attributes.Length + trainingSet.Metadata.Target.Length);

                        IActivationFunction activationFunction = new SigmoidActivationFunction();

                        NNClassificationQualityEvaluator         evaluator   = new NNClassificationQualityEvaluator(measure, learningMethod, hiddenUnitCount, activationFunction);
                        NNConnectionHeuristicCalculator          calculator  = new NNConnectionHeuristicCalculator(0.7);
                        DefaultRemovalLocalSearch <ConnectionDC> localSearch = new DefaultRemovalLocalSearch <ConnectionDC>(evaluator);
                        NNConnectorInvalidator invalidator = new NNConnectorInvalidator();

                        Problem <ConnectionDC> problem = new Problem <ConnectionDC>(invalidator, calculator, evaluator, localSearch);

                        NeuralNetwork network_before = null;

                        try
                        {
                            NeuralNetwork network_final = SingleTest.CreateNeuralNet_ANNMiner(problem, hiddenUnitCount, true, false, trainingSet, ref network_before);

                            quality_before = SingleTest.TestClassifier(network_before, testingSet, testMeasure);
                            quality_before = Math.Round(quality_before * 100, 2);

                            quality_final = SingleTest.TestClassifier(network_final, testingSet, testMeasure);
                            quality_final = Math.Round(quality_final * 100, 2);

                            size_before = network_before.Size;
                            size_final  = network_final.Size;

                            //----------------------------------------
                            Console.WriteLine("ANNMiner - before:" + dataset + "- Fold:" + _currentFold.ToString() + "=>" + measure.ToString() + ":" + quality_before.ToString());
                            Console.WriteLine("ANNMiner - final:" + dataset + "- Fold:" + _currentFold.ToString() + "=>" + measure.ToString() + ":" + quality_final.ToString());
                            Console.WriteLine("---------------------------------------------------");
                            //----------------------------------------
                            SaveResults(dataset, "ANNMiner - before", measure.ToString(), quality_before.ToString(), size_before.ToString());
                            SaveResults(dataset, "ANNMiner - final", measure.ToString(), quality_final.ToString(), size_final.ToString());
                        }
                        catch (Exception ex)
                        {
                            LogError(ex);
                            break;
                        }
                    }
                }

                Console.WriteLine("---------------------------------------------------");
                Console.WriteLine("---------------------------------------------------");
                Console.WriteLine("---------------------------------------------------");
            }
        }
Code Example #16
        public static void RunGHCNN()
        {
            foreach (string dataset in GetDatasetFolds("datasets.txt"))
            {
                //----------------------------------------
                Console.WriteLine("Data Table:" + dataset);
                //----------------------------------------

                double avgQualityBefore = 0;
                double avgSizeBefore    = 0;

                double avgQualityAfter = 0;
                double avgSizeAfter    = 0;


                for (_currentFold = 0; _currentFold < _folds; _currentFold++)
                {
                    //----------------------------------------
                    Console.WriteLine("Fold:" + _currentFold.ToString());
                    //----------------------------------------

                    DataMining.Data.Dataset[] tables      = LoadTrainingAndTestingData(dataset, _currentFold);
                    DataMining.Data.Dataset   trainingSet = tables[0];
                    DataMining.Data.Dataset   testingSet  = tables[1];

                    double quality_before = 0.0;
                    double quality_final  = 0.0;

                    double size_before = 0.0;
                    double size_final  = 0.0;

                    IClassificationMeasure testMeasure    = new AccuracyMeasure();
                    ILearningMethod        learningMethod = new BackPropagation(_acoLearningRateNW, _acoEpochsNW, 0.9, false);

                    //int hiddenUnitCount = trainingSet.Metadata.Attributes.Length * trainingSet.Metadata.Target.Length;
                    int hiddenUnitCount = (trainingSet.Metadata.Attributes.Length + trainingSet.Metadata.Target.Length);

                    IActivationFunction activationFunction = new SigmoidActivationFunction();

                    IClassificationMeasure                   trainingMeasure = new QLFunction();
                    NNClassificationQualityEvaluator         evaluator       = new NNClassificationQualityEvaluator(trainingMeasure, learningMethod, hiddenUnitCount, activationFunction);
                    NNConnectionHeuristicCalculator          calculator      = new NNConnectionHeuristicCalculator(0.7);
                    DefaultRemovalLocalSearch <ConnectionDC> localSearch     = new DefaultRemovalLocalSearch <ConnectionDC>(evaluator);
                    NNConnectorInvalidator                   invalidator     = new NNConnectorInvalidator();

                    Problem <ConnectionDC> problem = new Problem <ConnectionDC>(invalidator, calculator, evaluator, localSearch);

                    NeuralNetwork network_before = null;

                    try
                    {
                        stopWatch.Reset();
                        stopWatch.Start();
                        NeuralNetwork network_final = SingleTest.CreateNeuralNet_GHCNN(problem, hiddenUnitCount, true, false, trainingSet, ref network_before);
                        stopWatch.Stop();

                        quality_before    = SingleTest.TestClassifier(network_before, testingSet, testMeasure);
                        quality_before    = Math.Round(quality_before * 100, 2);
                        avgQualityBefore += quality_before;

                        quality_final    = SingleTest.TestClassifier(network_final, testingSet, testMeasure);
                        quality_final    = Math.Round(quality_final * 100, 2);
                        avgQualityAfter += quality_final;

                        size_before = network_before.Size;
                        size_final  = network_final.Size;

                        avgSizeBefore += size_before;
                        avgSizeAfter  += size_final;

                        //----------------------------------------
                        Console.WriteLine("GHCNN - before:" + dataset + "- Fold:" + _currentFold.ToString() + "=>" + testMeasure.ToString() + ":" + quality_before.ToString());
                        Console.WriteLine("GHCNN - final:" + dataset + "- Fold:" + _currentFold.ToString() + "=>" + testMeasure.ToString() + ":" + quality_final.ToString());
                        Console.WriteLine("---------------------------------------------------");
                        //----------------------------------------
                    }
                    catch (Exception ex)
                    {
                        LogError(ex);
                        break;
                    }
                }

                avgQualityBefore /= _folds;
                avgQualityAfter  /= _folds;

                avgSizeBefore /= _folds;
                avgSizeAfter  /= _folds;

                SaveResults(dataset, "GHCNN - before", avgQualityBefore.ToString(), avgSizeBefore.ToString(), stopWatch.ElapsedMilliseconds.ToString());
                SaveResults(dataset, "GHCNN - final", avgQulityAfter.ToString(), avgSizeAfter.ToString(), stopWatch.ElapsedMilliseconds.ToString());



                Console.WriteLine("---------------------------------------------------");
                Console.WriteLine("---------------------------------------------------");
                Console.WriteLine("---------------------------------------------------");
            }
        }
Code Example #17
        public static void RunBackPropagation()
        {
            foreach (string dataset in GetDatasetFolds("datasets.txt"))
            {
                //----------------------------------------
                Console.WriteLine("Data Table:" + dataset);
                //----------------------------------------

                double avgQuality = 0;
                double avgSize    = 0;

                for (_currentFold = 0; _currentFold < _folds; _currentFold++)
                {
                    //----------------------------------------
                    Console.WriteLine("Fold:" + _currentFold.ToString());
                    //----------------------------------------

                    DataMining.Data.Dataset[] tables      = LoadTrainingAndTestingData(dataset, _currentFold);
                    DataMining.Data.Dataset   trainingSet = tables[0];
                    DataMining.Data.Dataset   testingSet  = tables[1];

                    double quality = 0.0;
                    double size    = 0.0;

                    AccuracyMeasure     testMeasure        = new AccuracyMeasure();
                    IActivationFunction activationFunction = new SigmoidActivationFunction();

                    //int hiddenUnitCount = trainingSet.Metadata.Attributes.Length * trainingSet.Metadata.Target.Length;
                    int hiddenUnitCount = (trainingSet.Metadata.Attributes.Length + trainingSet.Metadata.Target.Length);

                    try
                    {
                        stopWatch.Reset();
                        stopWatch.Start();
                        NeuralNetwork network = SingleTest.CreateNeuralNet_BP(trainingSet, hiddenUnitCount, 0.9, _bpLearningRate, _bpEpochs, activationFunction);
                        stopWatch.Stop();

                        quality = SingleTest.TestClassifier(network, testingSet, testMeasure);
                        quality = Math.Round(quality * 100, 2);
                        size    = network.Size;

                        avgQuality += quality;
                        avgSize    += size;

                        //----------------------------------------
                        Console.WriteLine("Backprop:" + dataset + "- Fold:" + _currentFold.ToString() + "=>" + testMeasure.ToString() + ":" + quality.ToString());
                        Console.WriteLine("---------------------------------------------------");
                        //----------------------------------------
                    }
                    catch (Exception ex)
                    {
                        LogError(ex);
                        break;
                    }
                }

                avgQuality /= _folds;
                avgSize    /= _folds;
                SaveResults(dataset, "BackProp", avgQuality.ToString(), avgSize.ToString(), stopWatch.ElapsedMilliseconds.ToString());


                Console.WriteLine("---------------------------------------------------");
                Console.WriteLine("---------------------------------------------------");
                Console.WriteLine("---------------------------------------------------");
            }
        }
Code Example #18
        public static void RunACOIBL()
        {
            int             k = 9;
            AccuracyMeasure accuracyMeasure = new AccuracyMeasure();

            foreach (string dataset in GetDatasetFolds("datasets.txt"))
            {
                //----------------------------------------
                Console.WriteLine("Data Table:" + dataset);
                //----------------------------------------

                for (_currentFold = 0; _currentFold < _folds; _currentFold++)
                {
                    //----------------------------------------
                    //Console.WriteLine("Fold:" + _currentFold.ToString());
                    //----------------------------------------

                    DataMining.Data.Dataset[] tables      = LoadTrainingAndTestingData(dataset, _currentFold);
                    DataMining.Data.Dataset   trainingSet = tables[0];
                    DataMining.Data.Dataset   testingSet  = tables[1];

                    Dataset datasetFull = Dataset.Merge(trainingSet, testingSet);

                    double quality = 0;

                    try
                    {
                        {
                            KNearestNeighbours knn = SingleTest.CreateKNNClassifier(k, datasetFull, false);
                            quality = SingleTest.TestClassifier(knn, datasetFull, accuracyMeasure);
                            quality = Math.Round(quality * 100, 2);
                            //------------------------------------------------------------------
                            Console.WriteLine("KNN: " + dataset + " - Accuracy=" + quality);
                            SaveResults(dataset, "KNN", k.ToString(), quality.ToString());
                            Console.WriteLine("-------------------------------------------");
                            Console.WriteLine("-------------------------------------------");
                            Console.WriteLine("-------------------------------------------");
                        }

                        {
                            KNearestNeighbours knnWV = SingleTest.CreateKNNClassifier(k, datasetFull, true);
                            quality = SingleTest.TestClassifier(knnWV, datasetFull, accuracyMeasure);
                            quality = Math.Round(quality * 100, 2);
                            //------------------------------------------------------------------
                            Console.WriteLine("KNN-WV: " + dataset + " - Accuracy=" + quality);
                            SaveResults(dataset, "KNN-WV", k.ToString(), quality.ToString());
                            Console.WriteLine("-------------------------------------------");
                            Console.WriteLine("-------------------------------------------");
                            Console.WriteLine("-------------------------------------------");
                        }

                        {
                            NearestClassClassifier ncc = SingleTest.CreateNCClassifier(datasetFull);
                            quality = SingleTest.TestClassifier(ncc, datasetFull, accuracyMeasure);
                            quality = Math.Round(quality * 100, 2);
                            //------------------------------------------------------------------
                            Console.WriteLine("NNC: " + dataset + " - Accuracy=" + quality);
                            SaveResults(dataset, "NNC", quality.ToString());
                            Console.WriteLine("-------------------------------------------");
                            Console.WriteLine("-------------------------------------------");
                            Console.WriteLine("-------------------------------------------");
                        }

                        {
                            KNearestNeighbours knn = SingleTest.CreateKNNAntIBMinerClassifier(k, datasetFull, false);
                            quality = SingleTest.TestClassifier(knn, datasetFull, accuracyMeasure);
                            quality = Math.Round(quality * 100, 2);
                            //------------------------------------------------------------------
                            Console.WriteLine("ACO-KNN: " + dataset + " - Accuracy=" + quality);
                            SaveResults(dataset, "ACO-KNN", k.ToString(), quality.ToString());
                            Console.WriteLine("-------------------------------------------");
                            Console.WriteLine("-------------------------------------------");
                            Console.WriteLine("-------------------------------------------");
                        }

                        {
                            KNearestNeighbours knn = SingleTest.CreateKNNAntIBMinerClassifier(k, datasetFull, true);
                            quality = SingleTest.TestClassifier(knn, datasetFull, accuracyMeasure);
                            quality = Math.Round(quality * 100, 2);
                            //------------------------------------------------------------------
                            Console.WriteLine("ACO-KNN-WV: " + dataset + " - Accuracy=" + quality);
                            SaveResults(dataset, "ACO-KNN-WV", k.ToString(), quality.ToString());
                            Console.WriteLine("-------------------------------------------");
                            Console.WriteLine("-------------------------------------------");
                            Console.WriteLine("-------------------------------------------");
                        }

                        {
                            KNearestNeighbours knn = SingleTest.CreateKNNAntIBMinerClassifier_ClassBasedWeights(k, datasetFull, false);
                            quality = SingleTest.TestClassifier(knn, datasetFull, accuracyMeasure);
                            quality = Math.Round(quality * 100, 2);
                            //------------------------------------------------------------------
                            Console.WriteLine("ACO-KNN-CB: " + dataset + " - Accuracy=" + quality);
                            SaveResults(dataset, "ACO-KNN-CB", k.ToString(), quality.ToString());
                            Console.WriteLine("-------------------------------------------");
                            Console.WriteLine("-------------------------------------------");
                            Console.WriteLine("-------------------------------------------");
                        }

                        {
                            KNearestNeighbours knn = SingleTest.CreateKNNAntIBMinerClassifier_ClassBasedWeights(k, datasetFull, true);
                            quality = SingleTest.TestClassifier(knn, datasetFull, accuracyMeasure);
                            quality = Math.Round(quality * 100, 2);
                            //------------------------------------------------------------------
                            Console.WriteLine("ACO-KNN-CB-WV: " + dataset + " - Accuracy=" + quality);
                            SaveResults(dataset, "ACO-KNN-CB-WV", k.ToString(), quality.ToString());
                            Console.WriteLine("-------------------------------------------");
                            Console.WriteLine("-------------------------------------------");
                            Console.WriteLine("-------------------------------------------");
                        }

                        {
                            NearestClassClassifier ncc = SingleTest.CreateNCCAntIBMinerClassifier(datasetFull);
                            quality = SingleTest.TestClassifier(ncc, datasetFull, accuracyMeasure);
                            quality = Math.Round(quality * 100, 2);
                            //------------------------------------------------------------------
                            Console.WriteLine("ACO-NCC: " + dataset + " - Accuracy=" + quality);
                            SaveResults(dataset, "ACO-NCC", quality.ToString());
                            Console.WriteLine("-------------------------------------------");
                            Console.WriteLine("-------------------------------------------");
                            Console.WriteLine("-------------------------------------------");
                        }

                        {
                            NearestClassClassifier ncc = SingleTest.CreateNCCAntIBMinerClassifier_ClassBasedWeights(datasetFull);
                            quality = SingleTest.TestClassifier(ncc, datasetFull, accuracyMeasure);
                            quality = Math.Round(quality * 100, 2);
                            //------------------------------------------------------------------
                            Console.WriteLine("ACO-NCC-CB: " + dataset + " - Accuracy=" + quality);
                            SaveResults(dataset, "ACO-NCC-CB", quality.ToString());
                            Console.WriteLine("-------------------------------------------");
                            Console.WriteLine("-------------------------------------------");
                            Console.WriteLine("-------------------------------------------");
                        }

                        {
                            GaussianKernelEstimator GKC = SingleTest.CreateGKAntIBMinerClassifier(datasetFull);
                            quality = SingleTest.TestClassifier(GKC, datasetFull, accuracyMeasure);
                            quality = Math.Round(quality * 100, 2);
                            //------------------------------------------------------------------
                            Console.WriteLine("ACO-GKC: " + dataset + " - Accuracy=" + quality);
                            SaveResults(dataset, "ACO-GKC", quality.ToString());
                            Console.WriteLine("-------------------------------------------");
                            Console.WriteLine("-------------------------------------------");
                            Console.WriteLine("-------------------------------------------");
                        }

                        {
                            GaussianKernelEstimator GKC = SingleTest.CreateGKAntIBMinerClassifier_ClassBaseWeights(datasetFull);
                            quality = SingleTest.TestClassifier(GKC, datasetFull, accuracyMeasure);
                            quality = Math.Round(quality * 100, 2);
                            //------------------------------------------------------------------
                            Console.WriteLine("ACO-GKC-CB: " + dataset + " - Accuracy=" + quality);
                            SaveResults(dataset, "ACO-GKC-CB", quality.ToString());
                            Console.WriteLine("-------------------------------------------");
                            Console.WriteLine("-------------------------------------------");
                            Console.WriteLine("-------------------------------------------");
                        }
                    }
                    catch (Exception ex)
                    {
                        LogError(ex);
                        //  Console.WriteLine(ex.Message);
                    }
                }
            }
        }
Code Example #19
File: AccuracyMeasure.cs Project: Anshul-Bansal/gsoc
 internal static HandleRef getCPtr(AccuracyMeasure obj) {
   return (obj == null) ? new HandleRef(null, IntPtr.Zero) : obj.swigCPtr;
 }
Code Example #20
File: BatchTest2.cs Project: skn123/iFourmi
        public static void RunPSOIBL()
        {
            AccuracyMeasure accuracyMeasure = new AccuracyMeasure();
            IEnsembleClassificationStrategy majorityVote = new MajorityVoteStrategy();
            IEnsembleClassificationStrategy weightedVote = new WeightedVoteStrategy();

            foreach (string dataset in GetDatasetFolds(DatasetNamesFile))
            {
                //----------------------------------------
                Console.WriteLine("Data Table:" + dataset);
                //----------------------------------------

                #region PSO-KNN-CB

                try
                {
                    double quality1 = 0;
                    //double quality2 = 0;
                    double quality3 = 0;


                    for (_currentFold = 0; _currentFold < _folds; _currentFold++)
                    {
                        //----------------------------------------
                        Console.WriteLine("Fold:" + _currentFold.ToString());
                        //----------------------------------------

                        DataMining.Data.Dataset[] tables      = LoadTrainingAndTestingData(dataset, _currentFold);
                        DataMining.Data.Dataset   trainingSet = tables[0];
                        DataMining.Data.Dataset   testingSet  = tables[1];

                        EnsembleClassifier ensemble = SingleTest.CreateKNNPSOIBMinerClassifier_ClassBasedWeights_Ensemble(trainingSet, false);

                        quality1 += SingleTest.TestClassifier(ensemble[0], testingSet, accuracyMeasure);

                        ensemble.Stratgy = majorityVote;
                        //quality2 += SingleTest.TestClassifier(ensemble, testingSet, accuracyMeasure);

                        ensemble.Stratgy = weightedVote;
                        quality3        += SingleTest.TestClassifier(ensemble, testingSet, accuracyMeasure);
                    }

                    quality1 = Math.Round((quality1 / _folds) * 100, 2);
                    // quality2 = Math.Round((quality2 / _folds) * 100, 2);
                    quality3 = Math.Round((quality3 / _folds) * 100, 2);

                    //------------------------------------------------------------------
                    Console.WriteLine("PSO-KNN-CB: " + dataset + " - Accuracy=" + quality1);
                    SaveResults(dataset, "PSO-KNN-CB", quality1.ToString());

                    //Console.WriteLine("PSO-KNN-CB-ens-MV: " + dataset + " - Accuracy=" + quality2);
                    //SaveResults(dataset, "PSO-KNN-CB-ens-MV",quality2.ToString());

                    Console.WriteLine("PSO-KNN-CB-ens-WV: " + dataset + " - Accuracy=" + quality3);
                    SaveResults(dataset, "PSO-KNN-CB-ens-WV", quality3.ToString());
                    Console.WriteLine("-------------------------------------------");
                    Console.WriteLine("-------------------------------------------");
                    Console.WriteLine("-------------------------------------------");
                }

                catch (Exception ex)
                {
                    //   LogError(ex);
                    //  Console.WriteLine(ex.Message);
                }

                #endregion

                #region PSO-KNN-CB-WV

                //try
                //{
                //    double quality1 = 0;
                //    double quality2 = 0;
                //    double quality3 = 0;


                //    for (_currentFold = 0; _currentFold < _folds; _currentFold++)
                //    {
                //        //----------------------------------------
                //        Console.WriteLine("Fold:" + _currentFold.ToString());
                //        //----------------------------------------

                //        DataMining.Data.Dataset[] tables = LoadTrainingAndTestingData(dataset, _currentFold);
                //        DataMining.Data.Dataset trainingSet = tables[0];
                //        DataMining.Data.Dataset testingSet = tables[1];

                //        EnsembleClassifier ensemble = SingleTest.CreateKNNPSOIBMinerClassifier_ClassBasedWeights_Ensemble(trainingSet, true);

                //        quality1 += SingleTest.TestClassifier(ensemble[0], testingSet, accuracyMeasure);

                //        ensemble.Stratgy = majorityVote;
                //        quality2 += SingleTest.TestClassifier(ensemble, testingSet, accuracyMeasure);

                //        ensemble.Stratgy = weightedVote;
                //        quality3 += SingleTest.TestClassifier(ensemble, testingSet, accuracyMeasure);



                //    }

                //    quality1 = Math.Round((quality1 / _folds) * 100, 2);
                //    quality2 = Math.Round((quality2 / _folds) * 100, 2);
                //    quality3 = Math.Round((quality3 / _folds) * 100, 2);

                //    //------------------------------------------------------------------
                //    Console.WriteLine("PSO-KNN-CB-WV: " + dataset + " - Accuracy=" + quality1);
                //    SaveResults(dataset, "PSO-KNN-CB-WV", k.ToString(), quality1.ToString());

                //    Console.WriteLine("PSO-KNN-CB-WV-ens-MV: " + dataset + " - Accuracy=" + quality2);
                //    SaveResults(dataset, "PSO-KNN-CB-WV-ens-MV", k.ToString(), quality2.ToString());

                //    Console.WriteLine("PSO-KNN-CB-WV-ens-WV: " + dataset + " - Accuracy=" + quality3);
                //    SaveResults(dataset, "PSO-KNN-CB-WV-ens-WV", k.ToString(), quality3.ToString());

                //    Console.WriteLine("-------------------------------------------");
                //    Console.WriteLine("-------------------------------------------");
                //    Console.WriteLine("-------------------------------------------");

                //}

                //catch (Exception ex)
                //{
                //    LogError(ex);
                //    //  Console.WriteLine(ex.Message);
                //}

                #endregion

                #region PSO-NCC-CB

                try
                {
                    double quality1 = 0;
                    //double quality2 = 0;
                    double quality3 = 0;


                    for (_currentFold = 0; _currentFold < _folds; _currentFold++)
                    {
                        //----------------------------------------
                        Console.WriteLine("Fold:" + _currentFold.ToString());
                        //----------------------------------------

                        DataMining.Data.Dataset[] tables      = LoadTrainingAndTestingData(dataset, _currentFold);
                        DataMining.Data.Dataset   trainingSet = tables[0];
                        DataMining.Data.Dataset   testingSet  = tables[1];

                        EnsembleClassifier ensemble = SingleTest.CreateNCCPSOIBMinerClassifier_ClassBasedWeights_Ensemble(trainingSet);

                        quality1 += SingleTest.TestClassifier(ensemble[0], testingSet, accuracyMeasure);

                        //ensemble.Stratgy = majorityVote;
                        //quality2 += SingleTest.TestClassifier(ensemble, testingSet, accuracyMeasure);

                        ensemble.Stratgy = weightedVote;
                        quality3        += SingleTest.TestClassifier(ensemble, testingSet, accuracyMeasure);
                    }

                    quality1 = Math.Round((quality1 / _folds) * 100, 2);
                    //quality2 = Math.Round((quality2 / _folds) * 100, 2);
                    quality3 = Math.Round((quality3 / _folds) * 100, 2);

                    //------------------------------------------------------------------
                    Console.WriteLine("PSO-NCC-CB: " + dataset + " - Accuracy=" + quality1);
                    SaveResults(dataset, "PSO-NNC-CB", quality1.ToString());

                    //Console.WriteLine("PSO-NNC-CB-ens-MV: " + dataset + " - Accuracy=" + quality2);
                    //SaveResults(dataset, "PSO-NNC-CB-ens-MV",  quality2.ToString());

                    Console.WriteLine("PSO-NNC-CB-ens-WV: " + dataset + " - Accuracy=" + quality3);
                    SaveResults(dataset, "PSO-NNC-CB-ens-WV", quality3.ToString());
                    Console.WriteLine("-------------------------------------------");
                    Console.WriteLine("-------------------------------------------");
                    Console.WriteLine("-------------------------------------------");
                }

                catch (Exception ex)
                {
                    LogError(ex);
                    //  Console.WriteLine(ex.Message);
                }

                #endregion

                #region PSO-GKC-CB

                try
                {
                    double quality1 = 0;
                    //double quality2 = 0;
                    double quality3 = 0;


                    for (_currentFold = 0; _currentFold < _folds; _currentFold++)
                    {
                        //----------------------------------------
                        Console.WriteLine("Fold:" + _currentFold.ToString());
                        //----------------------------------------

                        DataMining.Data.Dataset[] tables      = LoadTrainingAndTestingData(dataset, _currentFold);
                        DataMining.Data.Dataset   trainingSet = tables[0];
                        DataMining.Data.Dataset   testingSet  = tables[1];

                        EnsembleClassifier ensemble = SingleTest.CreateGKPSOIBMinerClassifier_ClassBaseWeights_Ensemble(trainingSet);

                        quality1 += SingleTest.TestClassifier(ensemble[0], testingSet, accuracyMeasure);

                        //ensemble.Stratgy = majorityVote;
                        //quality2 += SingleTest.TestClassifier(ensemble, testingSet, accuracyMeasure);

                        ensemble.Stratgy = weightedVote;
                        quality3        += SingleTest.TestClassifier(ensemble, testingSet, accuracyMeasure);
                    }

                    quality1 = Math.Round((quality1 / _folds) * 100, 2);
                    //quality2 = Math.Round((quality2 / _folds) * 100, 2);
                    quality3 = Math.Round((quality3 / _folds) * 100, 2);

                    //------------------------------------------------------------------
                    Console.WriteLine("PSO-GKC-CB-CB: " + dataset + " - Accuracy=" + quality1);
                    SaveResults(dataset, "PSO-GKC-CB", quality1.ToString());

                    //Console.WriteLine("PSO-GKC-CB-ens-MV: " + dataset + " - Accuracy=" + quality2);
                    //SaveResults(dataset, "PSO-GKC-CB-ens-MV",  quality2.ToString());

                    Console.WriteLine("PSO-GKC-CB-ens-WV: " + dataset + " - Accuracy=" + quality3);
                    SaveResults(dataset, "PSO-GKC-CB-ens-WV", quality3.ToString());
                    Console.WriteLine("-------------------------------------------");
                    Console.WriteLine("-------------------------------------------");
                    Console.WriteLine("-------------------------------------------");
                }

                catch (Exception ex)
                {
                    LogError(ex);
                    //  Console.WriteLine(ex.Message);
                }

                #endregion
            }
        }
Code Example #21
        // SWIG interop helper: exposes the native handle wrapped by an AccuracyMeasure,
        // or a null HandleRef when the managed object is null.
        internal static HandleRef getCPtr(AccuracyMeasure obj)
        {
            return (obj == null) ? new HandleRef(null, IntPtr.Zero) : obj.swigCPtr;
        }
Code Example #22
File: BatchTest2.cs Project: skn123/iFourmi
        public static void RunConventional()
        {
            AccuracyMeasure accuracyMeasure = new AccuracyMeasure();

            foreach (string dataset in GetDatasetFolds(DatasetNamesFile))
            {
                //----------------------------------------
                Console.WriteLine("Data Table:" + dataset);
                //----------------------------------------

                try
                {
                    double quality1 = 0;
                    double quality2 = 0;
                    double quality3 = 0;

                    double quality4 = 0;
                    double quality5 = 0;
                    double quality6 = 0;

                    double quality7 = 0;
                    double quality8 = 0;
                    double quality9 = 0;


                    for (_currentFold = 0; _currentFold < _folds; _currentFold++)
                    {
                        //----------------------------------------
                        //Console.WriteLine("Fold:" + _currentFold.ToString());
                        //----------------------------------------

                        DataMining.Data.Dataset[] tables      = LoadTrainingAndTestingData(dataset, _currentFold);
                        DataMining.Data.Dataset   trainingSet = tables[0];
                        DataMining.Data.Dataset   testingSet  = tables[1];



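                        // k-nearest-neighbour baselines with k = 1, 11 and 21.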
                        KNearestNeighbours knn1 = SingleTest.CreateKNNClassifier(1, trainingSet, false);
                        quality1 += SingleTest.TestClassifier(knn1, testingSet, accuracyMeasure);
                        //------------------------------------------------------------------

                        KNearestNeighbours knn11 = SingleTest.CreateKNNClassifier(11, trainingSet, false);
                        quality2 += SingleTest.TestClassifier(knn11, testingSet, accuracyMeasure);
                        //------------------------------------------------------------------

                        KNearestNeighbours knn21 = SingleTest.CreateKNNClassifier(21, trainingSet, false);
                        quality3 += SingleTest.TestClassifier(knn21, testingSet, accuracyMeasure);
                        //------------------------------------------------------------------

                        //------------------------------------------------------------------
                        //------------------------------------------------------------------

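                        // Nearest-class classifiers with parameters 0, 0.5 and 0.9.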
                        NearestClassClassifier ncc0 = SingleTest.CreateNCClassifier(trainingSet, 0);
                        quality4 += SingleTest.TestClassifier(ncc0, testingSet, accuracyMeasure);
                        //------------------------------------------------------------------

                        NearestClassClassifier ncc5 = SingleTest.CreateNCClassifier(trainingSet, 0.5);
                        quality5 += SingleTest.TestClassifier(ncc5, testingSet, accuracyMeasure);
                        //------------------------------------------------------------------

                        NearestClassClassifier ncc1 = SingleTest.CreateNCClassifier(trainingSet, 0.9);
                        quality6 += SingleTest.TestClassifier(ncc1, testingSet, accuracyMeasure);
                        ////------------------------------------------------------------------

                        ////------------------------------------------------------------------
                        ////------------------------------------------------------------------

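                        // Gaussian kernel estimators with parameters 0, 0.25 and 0.5.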
                        GaussianKernelEstimator gcc0 = SingleTest.CreateGKClassifier(trainingSet, 0);
                        quality7 += SingleTest.TestClassifier(gcc0, testingSet, accuracyMeasure);
                        //------------------------------------------------------------------

                        GaussianKernelEstimator gcc5 = SingleTest.CreateGKClassifier(trainingSet, 0.25);
                        quality8 += SingleTest.TestClassifier(gcc5, testingSet, accuracyMeasure);
                        //------------------------------------------------------------------

                        GaussianKernelEstimator gcc1 = SingleTest.CreateGKClassifier(trainingSet, 0.5);
                        quality9 += SingleTest.TestClassifier(gcc1, testingSet, accuracyMeasure);
                    }

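                    // Convert each accumulated score to a mean percentage over the folds.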
                    quality1 = Math.Round((quality1 / _folds) * 100, 2);
                    quality2 = Math.Round((quality2 / _folds) * 100, 2);
                    quality3 = Math.Round((quality3 / _folds) * 100, 2);

                    quality4 = Math.Round((quality4 / _folds) * 100, 2);
                    quality5 = Math.Round((quality5 / _folds) * 100, 2);
                    quality6 = Math.Round((quality6 / _folds) * 100, 2);

                    quality7 = Math.Round((quality7 / _folds) * 100, 2);
                    quality8 = Math.Round((quality8 / _folds) * 100, 2);
                    quality9 = Math.Round((quality9 / _folds) * 100, 2);

                    Console.WriteLine("1NN: " + dataset + " - Accuracy=" + quality1);
                    SaveResults(dataset, "1NN", quality1.ToString());

                    Console.WriteLine("11NN: " + dataset + " - Accuracy=" + quality2);
                    SaveResults(dataset, "11NN", quality2.ToString());

                    Console.WriteLine("21NN: " + dataset + " - Accuracy=" + quality3);
                    SaveResults(dataset, "21NN", quality3.ToString());


                    Console.WriteLine("NCC-0: " + dataset + " - Accuracy=" + quality4);
                    SaveResults(dataset, "NCC-0", quality4.ToString());

                    Console.WriteLine("NCC-0.5: " + dataset + " - Accuracy=" + quality5);
                    SaveResults(dataset, "NCC-0.5", quality5.ToString());

                    Console.WriteLine("NCC-1: " + dataset + " - Accuracy=" + quality6);
                    SaveResults(dataset, "NCC-1", quality6.ToString());


                    Console.WriteLine("GKE-0: " + dataset + " - Accuracy=" + quality7);
                    SaveResults(dataset, "GKE-0", quality7.ToString());

                    Console.WriteLine("GKE-0.25: " + dataset + " - Accuracy=" + quality8);
                    SaveResults(dataset, "GKE-0.25", quality8.ToString());

                    Console.WriteLine("GKE-0.5: " + dataset + " - Accuracy=" + quality9);
                    SaveResults(dataset, "GKE-0.5", quality9.ToString());

                    Console.WriteLine("-------------------------------------------");
                    Console.WriteLine("-------------------------------------------");
                    Console.WriteLine("-------------------------------------------");
                }
                catch (Exception ex)
                {
                    LogError(ex);
                }
            }
        }
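
Note: the batch-test examples above all follow the same pattern: build a classifier, score it on the testing fold with SingleTest.TestClassifier and an AccuracyMeasure, then average the per-fold scores and report them as a rounded percentage. The standalone sketch below is not taken from iFourmi or shogun; the class and method names are hypothetical, and it only illustrates that arithmetic (plain accuracy on one fold, then the fold average) without any library dependencies.

using System;
using System.Collections.Generic;
using System.Linq;

static class FoldAveragingSketch
{
    // Plain classification accuracy: fraction of predictions that match the true labels.
    static double Accuracy(int[] predicted, int[] actual)
    {
        if (predicted.Length == 0 || predicted.Length != actual.Length)
            throw new ArgumentException("predicted and actual must be non-empty and of equal length");

        int correct = 0;
        for (int i = 0; i < predicted.Length; i++)
            if (predicted[i] == actual[i])
                correct++;

        return (double)correct / predicted.Length;
    }

    // Mean accuracy over the folds, expressed as a percentage rounded to two decimals,
    // mirroring Math.Round((quality / _folds) * 100, 2) in the examples above.
    static double AverageOverFolds(IEnumerable<double> foldAccuracies)
    {
        return Math.Round(foldAccuracies.Average() * 100, 2);
    }

    static void Main()
    {
        var folds = new List<double>
        {
            Accuracy(new[] { 0, 1, 1, 0 }, new[] { 0, 1, 0, 0 }),   // 0.75
            Accuracy(new[] { 1, 1, 0, 0 }, new[] { 1, 1, 0, 1 })    // 0.75
        };

        Console.WriteLine("Accuracy=" + AverageOverFolds(folds));   // prints Accuracy=75
    }
}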