protected override AbstractClassifier GenerateClassifier()
{
    // Active model: J48 (Weka's C4.5 decision tree) with default options.
    //
    // Alternatives tried during experimentation — swap in one of these
    // (with its option calls) to change the model:
    //   new DecisionStump();
    //   new LMT();
    //   new M5P();
    //   new NaiveBayes();
    //   new BayesNet();            // bn.setUseADTree(true);
    //                              // bn.setSearchAlgorithm(new weka.classifiers.bayes.net.search.ci.CISearchAlgorithm());
    //   new LWL();                 // lwl.setKNN(3);
    //   new RandomForest();        // rf.setMaxDepth(20); rf.setNumFeatures(100); rf.setNumTrees(100);
    //   new REPTree();
    //   new IBk(3);
    //
    // J48 options considered:
    //   ac.setUseMDLcorrection(false); ac.setSubtreeRaising(true);
    //   ac.setUseLaplace(true);        ac.setReducedErrorPruning(true);
    return new J48();
}
Example #2
0
 /// <summary>
 /// Renders a trained J48 decision tree as a graph in a new window.
 /// </summary>
 /// <param name="GlobalInfo">Global application context (unused here; kept for interface compatibility).</param>
 /// <param name="J48Model">A trained J48 classifier whose tree will be displayed.</param>
 /// <param name="IsCellular">Passed through to the graph renderer to select the display mode.</param>
 /// <returns>The window containing the rendered classification tree.</returns>
 public FormForClassificationTree DisplayTree(cGlobalInfo GlobalInfo, J48 J48Model, bool IsCellular)
 {
     FormForClassificationTree WindowForTree = new FormForClassificationTree();

     // J48.graph() serializes the tree in GraphViz DOT format; generate it
     // once (the original called graph() twice, rebuilding the string).
     string dot = J48Model.graph();

     // Strip the DOT wrapper: drop everything up to and including the
     // opening "{" plus the following newline, then the trailing "}" block,
     // so only the node/edge body is handed to the renderer.
     string StringForTree = dot.Remove(0, dot.IndexOf("{") + 2);
     StringForTree = StringForTree.Remove(StringForTree.Length - 3, 3);

     WindowForTree.gViewerForTreeClassif.Graph =
         cGlobalInfo.WindowHCSAnalyzer.ComputeAndDisplayGraph(StringForTree, IsCellular);
     return WindowForTree;
 }
Example #3
0
        /// <summary>
        /// Learning-curve evaluation: for each cross-validation fold, trains a J48
        /// tree on increasingly large prefixes of the fold's training data and
        /// records the per-class true-positive rate. Writes "evaluation.txt"
        /// (per-class rates averaged over folds, as percentages) and one
        /// "evaluation-&lt;activity&gt;.txt" per class (raw per-fold percentages).
        /// </summary>
        public void EvaluateIncrementalExamples()
        {
            // Number of training-size increments: each fold leaves
            // (DEFAULT_FOLDS-1)/DEFAULT_FOLDS of the data for training, stepped
            // by TRAINING_INCRMENETS examples at a time.
            int numberIncrements = (int)Math.Ceiling((double)(this.numExamples * (DEFAULT_FOLDS - 1) / DEFAULT_FOLDS) / (double)TRAINING_INCRMENETS);

            // [class, fold, increment] -> true-positive rate.
            // C# zero-initializes new arrays, so the original explicit clearing
            // loops were redundant and have been removed.
            this.trainingSizeMatrix = new double[this.classCount, DEFAULT_FOLDS, numberIncrements];

            // Shuffle the data once so the folds are randomized.
            Randomize randomizeFilter = new Randomize();
            randomizeFilter.setInputFormat(this.data);
            Instances randomData = Filter.useFilter(this.data, randomizeFilter);

            // Run incremental training for each fold and store the results per class.
            for (int i = 1; (i <= DEFAULT_FOLDS); i++)
            {
                // Training set = everything except fold i (inverted selection).
                RemoveFolds trainingFoldsFilter = new RemoveFolds();
                trainingFoldsFilter.set_NumFolds(DEFAULT_FOLDS);
                trainingFoldsFilter.inputFormat(randomData);
                trainingFoldsFilter.set_InvertSelection(true);
                trainingFoldsFilter.set_Fold(i);
                Instances alltraining = Filter.useFilter(randomData, trainingFoldsFilter);

                // Test set = fold i itself.
                RemoveFolds testFoldsFilter = new RemoveFolds();
                testFoldsFilter.set_NumFolds(DEFAULT_FOLDS);
                testFoldsFilter.inputFormat(randomData);
                testFoldsFilter.set_InvertSelection(false);
                testFoldsFilter.set_Fold(i);
                Instances test = Filter.useFilter(randomData, testFoldsFilter);

                for (int j = 1; (j <= numberIncrements); j++)
                {
                    // Keep only the first j*TRAINING_INCRMENETS training examples
                    // (inverted RemoveRange = keep the listed range), capped at
                    // the training-set size for the final, partial increment.
                    RemoveRange rangeFilter = new RemoveRange();
                    rangeFilter.setInputFormat(alltraining);
                    int first = 1;
                    int last = j * TRAINING_INCRMENETS;
                    if (last > (alltraining.m_Instances.size()))
                        last = alltraining.m_Instances.size();
                    string range = first.ToString() + "-" + last.ToString();
                    rangeFilter.set_InstancesIndices(range);
                    rangeFilter.set_InvertSelection(true);
                    Instances training = Filter.useFilter(alltraining, rangeFilter);

                    // Train a J48 tree on the current prefix and evaluate on the
                    // held-out fold.
                    J48 tree = new J48();
                    tree.set_MinNumObj(10);
                    tree.set_ConfidenceFactor((float)0.25);
                    tree.buildClassifier(training);
                    Evaluation eval = new Evaluation(training);
                    eval.evaluateModel(tree, test);

                    // Store the true-positive rate for each activity/class.
                    // (The original had an unused local and a stray unary '+'
                    // in "= +eval..."; each cell is written exactly once, so a
                    // plain assignment is correct.)
                    for (int k = 0; (k < this.classCount); k++)
                        trainingSizeMatrix[k, i - 1, j - 1] = eval.truePositiveRate(k);
                }
            }

            // Per-class learning curve averaged over all folds, as percentages.
            // 'using' guarantees the writer is closed even if WriteLine throws.
            using (TextWriter tw = new StreamWriter("evaluation.txt"))
            {
                for (int i = 0; (i < this.classCount); i++)
                {
                    // Row label: the class attribute's value name for class i.
                    string line = randomData.attribute(this.data.numAttributes() - 1).value_Renamed(i);

                    for (int k = 0; (k < numberIncrements); k++)
                    {
                        double percentage = 0.0;
                        for (int j = 0; (j < DEFAULT_FOLDS); j++)
                            percentage += this.trainingSizeMatrix[i, j, k];
                        percentage = percentage / DEFAULT_FOLDS * 100;
                        line += "," + percentage.ToString("0.00");
                    }
                    tw.WriteLine(line);
                }
            }

            // Per-class, per-fold raw learning curves.
            // NOTE(review): the original also divided each single-fold value by
            // DEFAULT_FOLDS here — a copy-paste from the averaging loop above.
            // A lone fold's rate needs no averaging, so that division is dropped.
            for (int i = 0; (i < this.classCount); i++)
            {
                string activity = randomData.attribute(this.data.numAttributes() - 1).value_Renamed(i);
                using (TextWriter tw = new StreamWriter("evaluation-" + activity + ".txt"))
                {
                    for (int j = 0; (j < DEFAULT_FOLDS); j++)
                    {
                        string line = j.ToString();
                        for (int k = 0; (k < numberIncrements); k++)
                        {
                            double percentage = this.trainingSizeMatrix[i, j, k] * 100;
                            line += "\t" + percentage.ToString("0.00");
                        }
                        tw.WriteLine(line);
                    }
                }
            }
        }
Example #4
-1
        /// <summary>
        /// Demo routine: loads an ARFF dataset from <paramref name="classifierFileName"/>,
        /// builds a J48 tree on a percentage-split training set, then prints
        /// 10-fold cross-validation statistics to the console.
        /// </summary>
        /// <param name="classifierFileName">Path to the ARFF data file (despite the name,
        /// this is the dataset, not a serialized classifier — read via java.io.FileReader).</param>
        public static void Test_predictClass(string classifierFileName)
        {
            FileReader javaFileReader = new FileReader(classifierFileName);

            // Instances(Reader) parses the ARFF stream into a Weka dataset.
            // NOTE(review): if the constructor throws, the reader is never closed.
            weka.core.Instances insts = new weka.core.Instances(javaFileReader);
            javaFileReader.close();

            // Convention: the last attribute is the class attribute.
            insts.setClassIndex(insts.numAttributes() - 1);

            weka.classifiers.Classifier cl = new weka.classifiers.trees.J48();
            // percentSplit is a field declared elsewhere in this class (not visible here).
            System.Console.WriteLine("Performing " + percentSplit + "% split evaluation.");



            // NOTE(review): dead code — the folds are computed and immediately
            // discarded; nothing is trained or evaluated on them.
            #region Manual Cross Fold
            Instances foldsData = new Instances(insts);
            int       folds     = 10;
            for (int n = 0; n < folds; n++)
            {
                Instances trainFold = foldsData.trainCV(folds, n);
                Instances testFold  = foldsData.testCV(folds, n);
            }
            #endregion



            // Percentage-split training: build the classifier on the first
            // percentSplit% of the instances. testSize is computed but unused,
            // and the crossValidateModel call below retrains cl internally, so
            // this build only affects cl's state between here and there.
            #region
            int trainSize             = insts.numInstances() * percentSplit / 100;
            int testSize              = insts.numInstances() - trainSize;
            weka.core.Instances train = new weka.core.Instances(insts, 0, trainSize);

            cl.buildClassifier(train);
            #endregion

            //Classifier cls = new J48();
            // 10-fold cross-validation on the full dataset with a fixed seed,
            // then dump the per-class details, confusion matrix, and margin
            // distribution to the console.
            Evaluation       eval = new Evaluation(insts);
            java.util.Random rand = new java.util.Random(1);  // using seed = 1
            int fold = 10;
            eval.crossValidateModel(cl, insts, fold, rand);
            System.Console.WriteLine("toClassDetailsString" + eval.toClassDetailsString());
            System.Console.WriteLine("toMatrixString\n" + eval.toMatrixString());
            System.Console.WriteLine("toCumulativeMarginDistributionString\n" + eval.toCumulativeMarginDistributionString());
            //System.Console.WriteLine("predictions\n" + eval.predictions());
            System.Console.ReadKey();

            // Commented-out remainder: per-instance prediction loop over the
            // held-out split, printing actual vs. predicted class labels.
            //var numnerOfInst = insts.numInstances();

            //for (int i = trainSize; i < numnerOfInst; i++)
            //{
            //    weka.core.Instance currentInst = insts.instance(i);

            //    double pred = cl.classifyInstance(currentInst);
            //    System.Console.WriteLine("class Index: " + insts.instance(i).classIndex());
            //    System.Console.WriteLine(", class value: " + insts.instance(i).classValue());
            //    System.Console.WriteLine(", ID: " + insts.instance(i).value(0));
            //    System.Console.WriteLine(", actual: " + insts.classAttribute().value((int)insts.instance(i).classValue()));
            //    System.Console.WriteLine(", predicted: " + insts.classAttribute().value((int)pred));

            //}
        }