Example #1
        public static void TrainSMO(Instances data)
        {
            //create the SMO classifier and set its options
            weka.classifiers.functions.SMO smo = new SMO();
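            // options: complexity C=1.0, tolerance 0.001, round-off epsilon 1.0E-12, polynomial kernel with exponent 1.0 (effectively a linear kernel)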
            smo.setOptions(weka.core.Utils.splitOptions(" -C 1.0 -L 0.001 -P 1.0E-12 -N 0 -V -1 -W 1 -K \"weka.classifiers.functions.supportVector.PolyKernel -C 250007 -E 1.0\""));

            //train
            smo.buildClassifier(data);

            //evaluate on the training data itself; a correctly trained model should score close to 100%
            weka.classifiers.Evaluation eval = new weka.classifiers.Evaluation(data);
            eval.evaluateModel(smo, data);
            Training_Output.printWekaResults(eval.toSummaryString("\nResults\n======\n", false));

            //serialize the trained model and save it to disk
            weka.core.SerializationHelper.write(GuiPreferences.Instance.WorkDirectory + "TrainSet_" + GuiPreferences.Instance.NudClassifyUsingTR.ToString() + "th_vectors_scaledCS_filteredIG.model", smo);

            //deserialize the model and load it back from disk
            smo = (weka.classifiers.functions.SMO)weka.core.SerializationHelper.read(GuiPreferences.Instance.WorkDirectory + "TrainSet_" + GuiPreferences.Instance.NudClassifyUsingTR.ToString() + "th_vectors_scaledCS_filteredIG.model");

            //test loaded model
            eval = new weka.classifiers.Evaluation(data);
            eval.evaluateModel(smo, data);
            Training_Output.printWekaResults(eval.toSummaryString("\nResults\n======\n", false));
        }
Example #2
    protected void Button2_Click(object sender, EventArgs e)
    {
        weka.core.Instances data = new weka.core.Instances(new java.io.FileReader("d:\\train.arff"));
        data.setClassIndex(data.numAttributes() - 1);
        weka.classifiers.Classifier cls = new weka.classifiers.bayes.NaiveBayes();
        // weka.classifiers.functions.supportVector.SMOset();
        int runs  = 1;
        int folds = 10;

        //string sq = "delete from nbresults";
        //dbc.execfn(sq);
        // perform cross-validation
        for (int i = 0; i < runs; i++)
        {
            // randomize data
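            // a different seed for each run gives every run its own shuffle of the data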
            int seed = i + 1;
            java.util.Random    rand     = new java.util.Random(seed);
            weka.core.Instances randData = new weka.core.Instances(data);
            randData.randomize(rand);
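            // stratify nominal-class data so each fold keeps roughly the same class distribution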
            if (randData.classAttribute().isNominal())
            {
                randData.stratify(folds);
            }
            // weka.classifiers.trees.j48 jj;
            weka.classifiers.Evaluation eval = new weka.classifiers.Evaluation(randData);
            for (int n = 0; n < folds; n++)
            {
                weka.core.Instances train = randData.trainCV(folds, n);
                weka.core.Instances test  = randData.testCV(folds, n);
                // build and evaluate classifier
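                // makeCopy() gives each fold a fresh copy of the (still untrained) base classifier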
                weka.classifiers.Classifier clsCopy = weka.classifiers.Classifier.makeCopy(cls);
                clsCopy.buildClassifier(train);

                eval.evaluateModel(clsCopy, test);
            }

            preci_value.Text  = eval.precision(0).ToString();
            recall_value.Text = eval.recall(0).ToString();
            //note: fMeasure(0) is the F-measure for class 0, not overall accuracy; eval.pctCorrect() may be what this field should display
            acc_value.Text    = eval.fMeasure(0).ToString();

            string s = "NB";
            //    string str = "insert into evaluation values('" + instid.Text + "','" + courid.Text.ToString() + "','" + preci_value.Text.ToString() + "','" + recall_value.Text.ToString() + "','" + acc_value.Text.ToString() + "','" + s + "' )";
            //  db.execfn(str);
            //  MessageBox.Show("saved");
        }
    }
Example #3
        public static void Test()
        {
            weka.core.Instances data = new weka.core.Instances(new java.io.FileReader("./data/Classification/Communication.arff"));
            data.setClassIndex(data.numAttributes() - 1);

            weka.classifiers.Classifier cls = new weka.classifiers.bayes.BayesNet();


            //Save BayesNet results in .txt file
            using (System.IO.StreamWriter file = new System.IO.StreamWriter("./data/Classification/Communication_Report.txt"))
            {
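                // percentSplit is assumed to be a class-level field defined outside this excerpt (e.g. 66 for a 66% train split)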
                file.WriteLine("Performing " + percentSplit + "% split evaluation.");

                int runs = 1;

                // repeat the percentage-split evaluation for each run
                for (int i = 0; i < runs; i++)
                {
                    // randomize data
                    int seed = i + 1;
                    java.util.Random    rand     = new java.util.Random(seed);
                    weka.core.Instances randData = new weka.core.Instances(data);
                    randData.randomize(rand);

                    //weka.classifiers.Evaluation eval = new weka.classifiers.Evaluation(randData);

                    int trainSize             = (int)Math.Round((double)data.numInstances() * percentSplit / 100);
                    int testSize              = data.numInstances() - trainSize;
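                    // train and test start as empty copies sharing the dataset's header; instances are added to them one by one below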
                    weka.core.Instances train = new weka.core.Instances(data, 0, 0);
                    weka.core.Instances test  = new weka.core.Instances(data, 0, 0);
                    train.setClassIndex(train.numAttributes() - 1);
                    test.setClassIndex(test.numAttributes() - 1);

                    //Write a section header for the test-set evaluation
                    file.WriteLine("EVALUATION OF TEST DATASET.");

                    //int numCorrect = 0;
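                    // distribute the shuffled instances: the first trainSize go to train, the rest to test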
                    for (int j = 0; j < data.numInstances(); j++)
                    {
                        weka.core.Instance currentInst = randData.instance(j);

                        if (j < trainSize)
                        {
                            train.add(currentInst);
                        }

                        else
                        {
                            test.add(currentInst);

                            /*
                             * double predictedClass = cls.classifyInstance(currentInst);
                             *
                             * double[] prediction = cls.distributionForInstance(currentInst);
                             *
                             * for (int p = 0; p < prediction.Length; p++)
                             * {
                             *  file.WriteLine("Probability of class [{0}] for [{1}] is: {2}", currentInst.classAttribute().value(p), currentInst, Math.Round(prediction[p], 4));
                             * }
                             * file.WriteLine();
                             *
                             * file.WriteLine();
                             * if (predictedClass == data.instance(j).classValue())
                             *  numCorrect++;*/
                        }
                    }

                    // build and evaluate classifier
                    cls.buildClassifier(train);

                    // Test the model
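                    // the Evaluation constructor takes a dataset used to initialise class priors and header information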
                    weka.classifiers.Evaluation eval = new weka.classifiers.Evaluation(randData);
                    eval.evaluateModel(cls, test);

                    // Print the results as in Weka explorer:
                    //Print statistics
                    String strSummaryTest = eval.toSummaryString();

                    file.WriteLine(strSummaryTest);
                    file.WriteLine();

                    //Print detailed class statistics
                    file.WriteLine(eval.toClassDetailsString());
                    file.WriteLine();

                    //Print confusion matrix
                    file.WriteLine(eval.toMatrixString());
                    file.WriteLine();

                    // Get the confusion matrix
                    double[][] cmMatrixTest = eval.confusionMatrix();

                    System.Console.WriteLine("Bayesian Network results saved in Communication_Report.txt file successfully.");
                }
            }
        }
Example #4
        public static void BayesTest()
        {
            try
            {
                weka.core.Instances insts = new weka.core.Instances(new java.io.FileReader("iris.arff"));
                insts.setClassIndex(insts.numAttributes() - 1);

                weka.classifiers.Classifier cl = new weka.classifiers.bayes.BayesNet();
                System.Console.WriteLine("Performing " + percentSplit + "% split evaluation.");

                //randomize the order of the instances in the dataset.
                weka.filters.Filter myRandom = new weka.filters.unsupervised.instance.Randomize();
                myRandom.setInputFormat(insts);
                insts = weka.filters.Filter.useFilter(insts, myRandom);

                int trainSize             = insts.numInstances() * percentSplit / 100;
                int testSize              = insts.numInstances() - trainSize;
                weka.core.Instances train = new weka.core.Instances(insts, 0, trainSize);
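                // test starts out empty (header only); the held-out instances are added to it in the loop below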
                weka.core.Instances test  = new weka.core.Instances(insts, 0, 0);


                cl.buildClassifier(train);
                //print model
                System.Console.WriteLine(cl);

                int numCorrect = 0;
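                // classify each held-out instance, print its class distribution, and count correct predictions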
                for (int i = trainSize; i < insts.numInstances(); i++)
                {
                    weka.core.Instance currentInst    = insts.instance(i);
                    double             predictedClass = cl.classifyInstance(currentInst);
                    test.add(currentInst);

                    double[] prediction = cl.distributionForInstance(currentInst);

                    for (int x = 0; x < prediction.Length; x++)
                    {
                        System.Console.WriteLine("Probability of class [{0}] for [{1}] is: {2}", currentInst.classAttribute().value(x), currentInst, Math.Round(prediction[x], 4));
                    }
                    System.Console.WriteLine();

                    if (predictedClass == insts.instance(i).classValue())
                    {
                        numCorrect++;
                    }
                }
                System.Console.WriteLine(numCorrect + " out of " + testSize + " correct (" +
                                         (double)((double)numCorrect / (double)testSize * 100.0) + "%)");

                // Evaluate the model on the training set
                weka.classifiers.Evaluation eTrain = new weka.classifiers.Evaluation(train);
                eTrain.evaluateModel(cl, train);

                // Print the results as in Weka explorer:
                //Print statistics
                String strSummaryTrain = eTrain.toSummaryString();
                System.Console.WriteLine(strSummaryTrain);

                //Print detailed class statistics
                System.Console.WriteLine(eTrain.toClassDetailsString());

                //Print confusion matrix
                System.Console.WriteLine(eTrain.toMatrixString());

                // Get the confusion matrix
                double[][] cmMatrixTrain = eTrain.confusionMatrix();


                // Evaluate the model on the held-out test set
                weka.classifiers.Evaluation eTest = new weka.classifiers.Evaluation(test);
                eTest.evaluateModel(cl, test);

                // Print the results as in Weka explorer:
                //Print statistics
                String strSummaryTest = eTest.toSummaryString();
                System.Console.WriteLine(strSummaryTest);

                //Print detailed class statistics
                System.Console.WriteLine(eTest.toClassDetailsString());

                //Print confusion matrix
                System.Console.WriteLine(eTest.toMatrixString());

                // Get the confusion matrix
                double[][] cmMatrixTest = eTest.confusionMatrix();
            }

            catch (java.lang.Exception ex)
            {
                ex.printStackTrace();
            }
        }
Example #5
        public static void cvdTest()
        {
            weka.core.Instances data = new weka.core.Instances(new java.io.FileReader("./data/Classification/Communication.arff"));
            data.setClassIndex(data.numAttributes() - 1);

            weka.classifiers.Classifier cls = new weka.classifiers.bayes.NaiveBayes();

            //Save Naive Bayes results in .txt file
            using (System.IO.StreamWriter file = new System.IO.StreamWriter("./data/Classification/Communication_Report.txt"))
            {
                int runs  = 1;
                int folds = 10;

                // perform cross-validation
                for (int i = 0; i < runs; i++)
                {
                    // randomize data
                    int seed = i + 1;
                    java.util.Random    rand     = new java.util.Random(seed);
                    weka.core.Instances randData = new weka.core.Instances(data);
                    randData.randomize(rand);
                    if (randData.classAttribute().isNominal())
                    {
                        randData.stratify(folds);
                    }

                    //note: this Evaluation object is never used below; each fold is evaluated with its own Evaluation instance instead
                    weka.classifiers.Evaluation eval = new weka.classifiers.Evaluation(randData);
                    for (int n = 0; n < folds; n++)
                    {
                        weka.core.Instances train = randData.trainCV(folds, n);
                        weka.core.Instances test  = randData.testCV(folds, n);
                        // build and evaluate classifier
                        //weka.classifiers.Classifier clsCopy = weka.classifiers.Classifier.makeCopy(cls);
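                        // the same classifier object is rebuilt in place for every fold (no fresh copy is made here)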
                        cls.buildClassifier(train);
                        //eval.evaluateModel(cls, test);

                        //Write a section header for this fold's test-set evaluation
                        file.WriteLine("EVALUATION OF TEST DATASET.");
                        // Test the model
                        weka.classifiers.Evaluation eTest = new weka.classifiers.Evaluation(test);
                        eTest.evaluateModel(cls, test);

                        // Print the results as in Weka explorer:
                        //Print statistics
                        String strSummaryTest = eTest.toSummaryString();

                        file.WriteLine(strSummaryTest);
                        file.WriteLine();

                        //Print detailed class statistics
                        file.WriteLine(eTest.toClassDetailsString());
                        file.WriteLine();

                        //Print confusion matrix
                        file.WriteLine(eTest.toMatrixString());
                        file.WriteLine();

                        // Get the confusion matrix
                        double[][] cmMatrixTest = eTest.confusionMatrix();

                        System.Console.WriteLine("Bayesian Network results saved in Communication_Report.txt file successfully.");
                    }

                    //Print classifier analytics for all the dataset
                    file.WriteLine("EVALUATION OF ALL DATASET.");

                    cls.buildClassifier(data);

                    // Evaluate the model on the full dataset
                    weka.classifiers.Evaluation eAlldata = new weka.classifiers.Evaluation(data);
                    eAlldata.evaluateModel(cls, data);

                    // Print the results as in Weka explorer:
                    //Print statistics
                    String strSummaryAlldata = eAlldata.toSummaryString();
                    file.WriteLine(strSummaryAlldata);
                    file.WriteLine();

                    //Print detailed class statistics
                    file.WriteLine(eAlldata.toClassDetailsString());
                    file.WriteLine();

                    //Print confusion matrix
                    file.WriteLine(eAlldata.toMatrixString());
                    file.WriteLine("----------------");

                    //print model
                    file.WriteLine(cls);
                    file.WriteLine();
                }
            }
        }
Example #6
        private static void WekaTrainingPipeline(Instances data)
        {
            //create the SMO classifier and set its options
            TrainingTesting_SharedVariables.smo = new SMO();
            TrainingTesting_SharedVariables.smo.setOptions(weka.core.Utils.splitOptions(" -C 1.0 -L 0.001 -P 1.0E-12 -N 0 -V -1 -W 1 -K \"weka.classifiers.functions.supportVector.PolyKernel -C 250007 -E 1.0\""));
            GuiPreferences.Instance.setLog("SMO Assigned.");

            //train
            TrainingTesting_SharedVariables.smo.buildClassifier(data);
            GuiPreferences.Instance.setLog("Training on Data");

            //evaluate on the training data itself; a correctly trained model should score close to 100%
            weka.classifiers.Evaluation eval = new weka.classifiers.Evaluation(data);
            eval.evaluateModel(TrainingTesting_SharedVariables.smo, data);
            Training_Output.printWekaResults(eval.toSummaryString("\nResults\n======\n", false));
            GuiPreferences.Instance.setLog("SMO Model Tested on Training Data, check that you get 100%.");

            //serialize the trained model and save it to disk
            weka.core.SerializationHelper.write(GuiPreferences.Instance.WorkDirectory + "TrainSet_" + GuiPreferences.Instance.NudClassifyUsingTR.ToString() + "th_vectors_scaledCS_filteredIG.libsvm.arff.model", TrainingTesting_SharedVariables.smo);
            GuiPreferences.Instance.setLog("SMO Model Serialized and saved.");

            //deserialize the model and load it back from disk
            TrainingTesting_SharedVariables.smo = (weka.classifiers.functions.SMO)weka.core.SerializationHelper.read(GuiPreferences.Instance.WorkDirectory + "TrainSet_" + GuiPreferences.Instance.NudClassifyUsingTR.ToString() + "th_vectors_scaledCS_filteredIG.libsvm.arff.model");
            GuiPreferences.Instance.setLog("SMO Model DeSerialized and loaded.");

            //test loaded model
            eval = new weka.classifiers.Evaluation(data);
            eval.evaluateModel(TrainingTesting_SharedVariables.smo, data);
            Training_Output.printWekaResults(eval.toSummaryString("\nResults\n======\n", false));
            GuiPreferences.Instance.setLog("SMO Model Tested on data (sanity check for loaded model).");

            //display the top information-gain (IG) voxels in the DICOM viewer
            //note: if attsel is null this only logs a warning and continues, but rankedAttributes() below will then throw; consider returning early here
            if (Preferences.Instance.attsel == null)
            {
                GuiPreferences.Instance.setLog("there are no ranked IG attributes or selected attr, continuing but please fix this possible bug.");
            }

            GuiPreferences.Instance.setLog("Dicom Viewer Displaying..");
            string dicomDir = GuiPreferences.Instance.WorkDirectory;

            // strip the last four characters of the work directory path and point at the "master" subfolder that holds the DICOM files
            dicomDir = dicomDir.Substring(0, dicomDir.Length - 4) + @"master\";
            string[] files     = System.IO.Directory.GetFiles(dicomDir, "*.dcm");
            string   firstFile = files[0].Substring(files[0].LastIndexOf(@"\") + 1);


            // true = select voxels by IG threshold, false = select a fixed voxel amount
            bool thresholdOrVoxelAmount = GuiPreferences.Instance.IgSelectionType == IGType.Threshold;
            Form plotForm = new DicomImageViewer.MainForm(dicomDir + firstFile, firstFile,
                                                          Preferences.Instance.attsel.rankedAttributes(),
                                                          Convert.ToDouble(GuiPreferences.Instance.NudIGThreshold),
                                                          Convert.ToInt32(GuiPreferences.Instance.NudIGVoxelAmount),
                                                          thresholdOrVoxelAmount);// _trainTopIGFeatures);

            plotForm.StartPosition = FormStartPosition.CenterParent;
            plotForm.ShowDialog();
            plotForm.Close();
            GuiPreferences.Instance.setLog("Dicom Viewer Closed.");
        }