Example #1
        public void Test()
        {
            weka.core.Instances insts = new weka.core.Instances(new java.io.FileReader("D:\\android_analysis\\attributes.arff"));
            insts.setClassIndex(insts.numAttributes() - 1);

            weka.classifiers.Classifier cl = new weka.classifiers.trees.J48();

            // randomize the order of the instances in the dataset before splitting
            weka.filters.Filter myRandom = new weka.filters.unsupervised.instance.Randomize();
            myRandom.setInputFormat(insts);
            insts = weka.filters.Filter.useFilter(insts, myRandom);

            int trainSize = (int)(insts.numInstances() * 0.66);
            int testSize  = insts.numInstances() - trainSize;

            weka.core.Instances train = new weka.core.Instances(insts, 0, trainSize);

            cl.buildClassifier(train);
            for (int i = trainSize; i < insts.numInstances(); i++)
            {
                weka.core.Instance currentInst    = insts.instance(i);
                double             predictedClass = cl.classifyInstance(currentInst);
                double[]           distrs         = cl.distributionForInstance(currentInst);
                string             actual         = insts.classAttribute().value((int)currentInst.classValue());
                string             predicted      = insts.classAttribute().value((int)predictedClass);
                System.Console.WriteLine("ID: " + (i + 1) + ", " + actual + " --> " + predicted);
            }
        }
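distributionForInstance is called in the loop above but its result is never printed; a small sketch of how those same calls could also report the prediction's probability (the DescribePrediction helper name is illustrative, not part of the original code):

        // Sketch: same classify/distribution calls as the loop above, but the class
        // probability of the predicted label is returned alongside it.
        private static string DescribePrediction(weka.classifiers.Classifier cl, weka.core.Instances insts, int i)
        {
            weka.core.Instance currentInst    = insts.instance(i);
            double             predictedClass = cl.classifyInstance(currentInst);
            double[]           distrs         = cl.distributionForInstance(currentInst);
            string             predicted      = insts.classAttribute().value((int)predictedClass);
            return predicted + " (p=" + distrs[(int)predictedClass].ToString("0.00") + ")";
        }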
Example #2
        public static Classifier GetWekaClassifier(string algorithm, string trainingSetPath)
        {
            Classifier classifier = null;

            switch (algorithm)
            {
            case "KNN":
            {
                classifier = new weka.classifiers.lazy.IB1();
                WekaNETBridge.WekaClassification wekaClassification = new WekaNETBridge.WekaClassification(trainingSetPath, classifier);
                classifier = wekaClassification.CreateClassifier();
            }
            break;

            case "NBayes":
            {
                classifier = new weka.classifiers.bayes.NaiveBayes();
                WekaNETBridge.WekaClassification wekaClassification = new WekaNETBridge.WekaClassification(trainingSetPath, classifier);
                classifier = wekaClassification.CreateClassifier();
            }
            break;

            case "JRip":
            {
                classifier = new weka.classifiers.rules.JRip();
                WekaNETBridge.WekaClassification wekaClassification = new WekaNETBridge.WekaClassification(trainingSetPath, classifier);
                classifier = wekaClassification.CreateClassifier();
            }
            break;

            case "J48":
            {
                classifier = new weka.classifiers.trees.J48();
                WekaNETBridge.WekaClassification wekaClassification = new WekaNETBridge.WekaClassification(trainingSetPath, classifier);
                classifier = wekaClassification.CreateClassifier();
            }
            break;

            case "NeuralNets":
            {
                classifier = new weka.classifiers.functions.MultilayerPerceptron();
                WekaNETBridge.WekaClassification wekaClassification = new WekaNETBridge.WekaClassification(trainingSetPath, classifier);
                classifier = wekaClassification.CreateClassifier();
            }
            break;

            case "SVM":
            {
                classifier = new weka.classifiers.functions.SMO();
                WekaNETBridge.WekaClassification wekaClassification = new WekaNETBridge.WekaClassification(trainingSetPath, classifier);
                classifier = wekaClassification.CreateClassifier();
            }
            break;
            }

            return(classifier);
        }
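Every case in the switch above differs only in the classifier it instantiates before handing it to WekaNETBridge.WekaClassification. A minimal table-driven sketch of the same lookup (the Factories dictionary and the GetWekaClassifierCompact name are illustrative, not part of the original project):

        // Sketch: table-driven variant of the switch above. Assumes the same
        // WekaNETBridge.WekaClassification wrapper; unknown names return null,
        // mirroring the original fall-through behaviour.
        private static readonly System.Collections.Generic.Dictionary<string, System.Func<weka.classifiers.Classifier>> Factories =
            new System.Collections.Generic.Dictionary<string, System.Func<weka.classifiers.Classifier>>
            {
                { "KNN",        () => new weka.classifiers.lazy.IB1() },
                { "NBayes",     () => new weka.classifiers.bayes.NaiveBayes() },
                { "JRip",       () => new weka.classifiers.rules.JRip() },
                { "J48",        () => new weka.classifiers.trees.J48() },
                { "NeuralNets", () => new weka.classifiers.functions.MultilayerPerceptron() },
                { "SVM",        () => new weka.classifiers.functions.SMO() }
            };

        public static weka.classifiers.Classifier GetWekaClassifierCompact(string algorithm, string trainingSetPath)
        {
            System.Func<weka.classifiers.Classifier> factory;
            if (!Factories.TryGetValue(algorithm, out factory))
            {
                return null;
            }
            var wekaClassification = new WekaNETBridge.WekaClassification(trainingSetPath, factory());
            return wekaClassification.CreateClassifier();
        }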
Example #3
        public static void CalculateSuccessForDt(weka.core.Instances originalInsts)
        {
            try
            {
                var form = Form.ActiveForm as Form1;
                form.successPrcDt.Text = "Training...";
                form.successRtDt.Text  = "../" + testSize;

                weka.core.Instances insts = originalInsts;

                // Pre-process
                insts = ConvertNumericToNominal(insts);
                insts = Normalize(insts);

                // Classify
                weka.classifiers.Classifier cl    = new weka.classifiers.trees.J48();
                weka.core.Instances         train = new weka.core.Instances(insts, 0, trainSize);
                cl.buildClassifier(train);

                int    numCorrect = 0;
                double percentage = 0;
                for (int i = trainSize; i < insts.numInstances(); i++)
                {
                    weka.core.Instance currentInst    = insts.instance(i);
                    double             predictedClass = cl.classifyInstance(currentInst);
                    if (predictedClass == insts.instance(i).classValue())
                    {
                        numCorrect++;
                    }

                    percentage             = (double)numCorrect / (double)testSize * 100.0;
                    form.successRtDt.Text  = numCorrect + "/" + testSize;
                    form.successPrcDt.Text = String.Format("{0:0.00}", percentage) + "%";
                }
                succesRates.Add(Classifier.ID3, percentage);
                classifiers.Add(Classifier.ID3, cl);
            }
            catch (java.lang.Exception ex)
            {
                ex.printStackTrace();
                MessageBox.Show(ex.ToString(), "Error for Decision Tree", MessageBoxButtons.OK, MessageBoxIcon.Error);
            }
            catch (Exception)
            {
                MessageBox.Show("Error for Decision Tree", "Error for Decision Tree", MessageBoxButtons.OK, MessageBoxIcon.Error);
            }
        }
Example #4
File: Form1.cs Project: OniriiWu/weka.NET
        public static string classifyTest()
        {
            try
            {
                String result = "";

                weka.core.Instances insts = new weka.core.Instances(new java.io.FileReader("C:\\Program Files\\Weka-3-7\\data\\iris.arff"));
                insts.setClassIndex(insts.numAttributes() - 1);

                weka.classifiers.Classifier cl = new weka.classifiers.trees.J48();
                //  Console.WriteLine("Performing " + percentSplit + "% split evaluation.");
                result += "Performing " + percentSplit + "% split evaluation.\n";
                //randomize the order of the instances in the dataset.
                weka.filters.Filter myRandom = new weka.filters.unsupervised.instance.Randomize();
                myRandom.setInputFormat(insts);
                insts = weka.filters.Filter.useFilter(insts, myRandom);

                int trainSize             = insts.numInstances() * percentSplit / 100;
                int testSize              = insts.numInstances() - trainSize;
                weka.core.Instances train = new weka.core.Instances(insts, 0, trainSize);

                cl.buildClassifier(train);
                int numCorrect = 0;
                for (int i = trainSize; i < insts.numInstances(); i++)
                {
                    weka.core.Instance currentInst    = insts.instance(i);
                    double             predictedClass = cl.classifyInstance(currentInst);
                    if (predictedClass == insts.instance(i).classValue())
                    {
                        numCorrect++;
                    }
                }
                //Console.WriteLine(numCorrect + " out of " + testSize + " correct (" + (double)((double)numCorrect / (double)testSize * 100.0) + "%)");
                result += (numCorrect + " out of " + testSize + " correct (" + (double)((double)numCorrect / (double)testSize * 100.0) + "%)");

                return(result);
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.Message);
                return(ex.Message);
            }
        }
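The manual counting loop used in the percent-split examples can also be written with weka.classifiers.Evaluation, the same class referenced by the commented-out lines in the next example and by the cross-validation calls further below. A minimal sketch under the same assumptions as example #4 (the iris.arff path and the 66% split are only illustrative):

        // Sketch: percent-split evaluation via weka.classifiers.Evaluation instead of
        // counting correct predictions by hand. Path and split value are placeholders.
        public static string classifyTestWithEvaluation()
        {
            int percentSplit = 66;
            weka.core.Instances insts = new weka.core.Instances(new java.io.FileReader("C:\\Program Files\\Weka-3-7\\data\\iris.arff"));
            insts.setClassIndex(insts.numAttributes() - 1);

            // randomize the order of the instances in the dataset
            weka.filters.Filter myRandom = new weka.filters.unsupervised.instance.Randomize();
            myRandom.setInputFormat(insts);
            insts = weka.filters.Filter.useFilter(insts, myRandom);

            int trainSize             = insts.numInstances() * percentSplit / 100;
            weka.core.Instances train = new weka.core.Instances(insts, 0, trainSize);
            weka.core.Instances test  = new weka.core.Instances(insts, trainSize, insts.numInstances() - trainSize);

            weka.classifiers.Classifier cl = new weka.classifiers.trees.J48();
            cl.buildClassifier(train);

            // Evaluation collects accuracy, confusion matrix, etc. in one place
            weka.classifiers.Evaluation eval = new weka.classifiers.Evaluation(train);
            eval.evaluateModel(cl, test);
            return eval.toSummaryString();
        }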
Example #5
    private void initialiseClassifier()
    {
        // Find the Player object to read the training-file path from its behaviour script.
        GameObject      player = GameObject.Find("Player");
        PlayerBehaviour playerBehaviourScript = player.GetComponent <PlayerBehaviour>();

        //loads training data, builds a new classifier and trains it
        insts = new weka.core.Instances(new java.io.FileReader(playerBehaviourScript.stringToEdit));
        insts.setClassIndex(4);
        j48Classifier = new weka.classifiers.trees.J48();
        //Optional methods, currently being tested for improvements
        //j48Classifier.setUnpruned (true);
        //j48Classifier.setUseLaplace (true);
        j48Classifier.buildClassifier(insts);
        //NO CURRENT TEST SET READY TO USE
        //weka.core.Instances testData = new weka.core.Instances(new java.io.FileReader("TestSet.arff"));
        //testData.setClassIndex(5);
        //weka.classifiers.Evaluation eval = new weka.classifiers.Evaluation(insts);
        //eval.evaluateModel (j48Classifier,testData);
        Debug.Log(j48Classifier.toSummaryString());
    }
Example #6
File: Brain.cs Project: kovacshuni/slrt
        public static void classifyTest()
        {
            try
            {
                Console.WriteLine("Hello Java, from C#!");
                weka.core.Instances insts = new weka.core.Instances(new java.io.FileReader("D:/iris.arff"));
                insts.setClassIndex(insts.numAttributes() - 1);

                weka.classifiers.Classifier cl = new weka.classifiers.trees.J48();
                Console.WriteLine("Performing " + 33 + "% split evaluation.");

                //randomize the order of the instances in the dataset.
                weka.filters.Filter myRandom = new weka.filters.unsupervised.instance.Randomize();
                myRandom.setInputFormat(insts);
                insts = weka.filters.Filter.useFilter(insts, myRandom);

                int trainSize = insts.numInstances() * 33 / 100;
                int testSize = insts.numInstances() - trainSize;
                weka.core.Instances train = new weka.core.Instances(insts, 0, trainSize);

                cl.buildClassifier(train);
                int numCorrect = 0;
                for (int i = trainSize; i < insts.numInstances(); i++)
                {
                    weka.core.Instance currentInst = insts.instance(i);
                    double predictedClass = cl.classifyInstance(currentInst);
                    if (predictedClass == insts.instance(i).classValue())
                        numCorrect++;
                }
                Console.WriteLine(numCorrect + " out of " + testSize + " correct (" +
                           (double)((double)numCorrect / (double)testSize * 100.0) + "%)");
            }
            catch (Exception ex)
            {
                // previously swallowed silently; at least report the failure
                Console.WriteLine(ex);
            }
        }
Example #7
File: Main.cs Project: UnderNotic/weka2wpf
        public static void ClassifyTest()
        {
            try
            {
                weka.core.Instances insts = new ConverterUtils.DataSource("weka").getDataSet();
                insts.setClassIndex(insts.numAttributes() - 1);

                weka.classifiers.Classifier cl = new weka.classifiers.trees.J48();
                Console.WriteLine("Performing " + percentSplit + "% split evaluation.");

                //randomize the order of the instances in the dataset.
                weka.filters.Filter myRandom = new weka.filters.unsupervised.instance.Randomize();
                myRandom.setInputFormat(insts);
                insts = weka.filters.Filter.useFilter(insts, myRandom);

                int trainSize = insts.numInstances()*percentSplit/100;
                int testSize = insts.numInstances() - trainSize;
                weka.core.Instances train = new weka.core.Instances(insts, 0, trainSize);

                cl.buildClassifier(train);
                int numCorrect = 0;
                for (int i = trainSize; i < insts.numInstances(); i++)
                {
                    weka.core.Instance currentInst = insts.instance(i);
                    double predictedClass = cl.classifyInstance(currentInst);
                    if (predictedClass == insts.instance(i).classValue())
                        numCorrect++;
                }
                Console.WriteLine(numCorrect + " out of " + testSize + " correct (" +
                                  (double) ((double) numCorrect/(double) testSize*100.0) + "%)");
            }
            catch (java.lang.Exception ex)
            {
                ex.printStackTrace();
            }
        }
Example #8
        public static string classifyTest(string file, string classifier)
        {
            string data = "No data";

            try
            {
                //weka.core.Instances insts = new weka.core.Instances(new java.io.FileReader("C:\\Users\\kinli\\source\\repos\\WebApplication2\\WebApplication2\\iris.arff"));
                weka.core.Instances insts = new weka.core.Instances(new java.io.FileReader(file));
                insts.setClassIndex(insts.numAttributes() - 1);
                weka.classifiers.Classifier cl = new weka.classifiers.trees.J48();

                if (classifier == "J48")
                {
                    cl = new weka.classifiers.trees.J48();
                }
                else if (classifier == "MLP")
                {
                    cl = new weka.classifiers.functions.MultilayerPerceptron();
                }
                else if (classifier == "NaiveBayes")
                {
                    cl = new weka.classifiers.bayes.NaiveBayes();
                }

                //data = ("Performing " + percentSplit + "% split evaluation.\n");
                data = ("Performing use training set evaluation.\n");
                //randomize the order of the instances in the dataset.

                weka.filters.Filter myRandom = new weka.filters.unsupervised.instance.Randomize();
                myRandom.setInputFormat(insts);
                insts = weka.filters.Filter.useFilter(insts, myRandom);

                /*
                 * int trainSize = insts.numInstances() * percentSplit / 100;
                 * int testSize = insts.numInstances() - trainSize;
                 * weka.core.Instances train = new weka.core.Instances(insts, 0, trainSize);
                 *
                 * cl.buildClassifier(train);
                 * int numCorrect = 0;
                 * for (int i = trainSize; i < insts.numInstances(); i++)
                 * {
                 *  weka.core.Instance currentInst = insts.instance(i);
                 *  double predictedClass = cl.classifyInstance(currentInst);
                 *  if (predictedClass == insts.instance(i).classValue())
                 *      numCorrect++;
                 * }*/
                cl.buildClassifier(insts);

                int numCorrect = 0;
                for (int i = 0; i < insts.numInstances(); i++)
                {
                    weka.core.Instance currentInst    = insts.instance(i);
                    double             predictedClass = cl.classifyInstance(currentInst);
                    if (predictedClass == insts.instance(i).classValue())
                    {
                        numCorrect++;
                    }
                }
                data = data + (numCorrect + " out of " + insts.numInstances() + " correct (" +
                               (double)((double)numCorrect / (double)insts.numInstances() * 100.0) + "%)");
            }
            catch (java.lang.Exception ex)
            {
                data = "Error";
                ex.printStackTrace();
            }
            return(data);
        }
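Evaluating on the same instances the model was trained on, as example #8 does, tends to give an optimistic accuracy figure. A short sketch of a cross-validated alternative, using the same crossValidateModel call that appears in the HCSA examples below (the method name and the fold count are illustrative):

        // Sketch: 10-fold cross-validation instead of training-set evaluation.
        // crossValidateModel builds and tests the classifier internally on each fold.
        public static string crossValidateTest(string file)
        {
            weka.core.Instances insts = new weka.core.Instances(new java.io.FileReader(file));
            insts.setClassIndex(insts.numAttributes() - 1);

            weka.classifiers.Classifier cl = new weka.classifiers.trees.J48();

            weka.classifiers.Evaluation evaluation = new weka.classifiers.Evaluation(insts);
            evaluation.crossValidateModel(cl, insts, 10, new java.util.Random(1));

            return evaluation.toSummaryString() + "\n" + evaluation.toMatrixString();
        }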
Example #9
        private List<string> ComputePlateBasedClassification(int Classes, int MinObjectsNumber, cPlate CurrentPlateToProcess)
        {
            CurrentPlateToProcess.ComputePlateBasedDescriptors();
            weka.core.Instances insts = CurrentPlateToProcess.CreateInstancesWithClassesWithPlateBasedDescriptor(Classes);
            weka.classifiers.trees.J48 ClassificationModel = new weka.classifiers.trees.J48();
            ClassificationModel.setMinNumObj(MinObjectsNumber);

            weka.core.Instances train = new weka.core.Instances(insts, 0, insts.numInstances());
            ClassificationModel.buildClassifier(train);

            // display the tree
            string DotString = ClassificationModel.graph().Remove(0, ClassificationModel.graph().IndexOf("{") + 2);
            int DotLength = DotString.Length;

            string NewDotString = DotString.Remove(DotLength - 3, 3);
            ComputeAndDisplayGraph(NewDotString);
            return ComputeGraph(NewDotString, Classes);
        }
Example #10
File: Classif.cs Project: cyrenaique/HCSA
        /// <summary>
        /// Plate by plate classification
        /// </summary>
        /// <param name="NeutralClass">Neutral class</param>
        /// <param name="IdxClassifier">Classifier Index (0:J48), (1:SVM), (2:NN), (3:KNN)</param>
        private void ClassificationPlateByPlate(int NeutralClass, int IdxClassifier)
        {
            int NumberOfPlates = cGlobalInfo.CurrentScreening.ListPlatesActive.Count;

            for (int PlateIdx = 0; PlateIdx < NumberOfPlates; PlateIdx++)
            {
                cPlate CurrentPlateToProcess = cGlobalInfo.CurrentScreening.ListPlatesActive.GetPlate(cGlobalInfo.CurrentScreening.ListPlatesActive[PlateIdx].GetName());
                cInfoClass InfoClass = CurrentPlateToProcess.GetNumberOfClassesBut(NeutralClass);
                // return;
                if (InfoClass.NumberOfClass <= 1)
                {
                    richTextBoxInfoClassif.AppendText(CurrentPlateToProcess.GetName() + " not processed.\n");
                    continue;
                }

                weka.core.Instances insts = CurrentPlateToProcess.CreateInstancesWithClasses(InfoClass, NeutralClass);
                Classifier ClassificationModel = null;
                string Text = "";
                switch (IdxClassifier)
                {
                    case 0: // J48
                        ClassificationModel = new weka.classifiers.trees.J48();
                        weka.classifiers.trees.J48 J48Model = (weka.classifiers.trees.J48)ClassificationModel;
                        J48Model.setMinNumObj((int)cGlobalInfo.OptionsWindow.numericUpDownJ48MinNumObjects.Value);
                        Text = "J48 - ";
                        break;
                    case 1: // SVM
                        ClassificationModel = new weka.classifiers.functions.SMO();
                        Text = "SVM - ";
                        break;
                    case 2: // NN
                        ClassificationModel = new weka.classifiers.functions.MultilayerPerceptron();
                        Text = "Neural Network - ";
                        break;
                    case 3: // KNN
                        ClassificationModel = new weka.classifiers.lazy.IBk((int)cGlobalInfo.OptionsWindow.numericUpDownKofKNN.Value);
                        Text = "K-Nearest Neighbor(s) - ";
                        break;
                    case 4: // Random Forest
                        ClassificationModel = new weka.classifiers.trees.RandomForest();
                        Text = "Random Forest - ";

                        break;
                    default:
                        break;
                }
                richTextBoxInfoClassif.AppendText(Text + InfoClass.NumberOfClass + " classes - Plate: ");

                richTextBoxInfoClassif.AppendText(CurrentPlateToProcess.GetName() + " OK \n");
                weka.core.Instances train = new weka.core.Instances(insts, 0, insts.numInstances());

                ClassificationModel.buildClassifier(train);
                cGlobalInfo.ConsoleWriteLine(ClassificationModel.ToString());

                weka.classifiers.Evaluation evaluation = new weka.classifiers.Evaluation(insts);
                evaluation.crossValidateModel(ClassificationModel, insts, 2, new java.util.Random(1));

                cGlobalInfo.ConsoleWriteLine(evaluation.toSummaryString());
                cGlobalInfo.ConsoleWriteLine(evaluation.toMatrixString());

                // update classification information of the current plate
                switch (IdxClassifier)
                {
                    case 0: // J48
                        weka.classifiers.trees.J48 CurrentClassifier = (weka.classifiers.trees.J48)(ClassificationModel);
                        CurrentPlateToProcess.GetInfoClassif().StringForTree = CurrentClassifier.graph().Remove(0, CurrentClassifier.graph().IndexOf("{") + 2);
                        break;
                    /*case 1: // SVM

                        break;
                    case 2: // NN

                        break;
                    case 3: // KNN

                        break;*/
                    default:
                        break;
                }

                CurrentPlateToProcess.GetInfoClassif().StringForQuality = evaluation.toSummaryString();
                CurrentPlateToProcess.GetInfoClassif().ConfusionMatrix = evaluation.toMatrixString();

                foreach (cWell TmpWell in CurrentPlateToProcess.ListActiveWells)
                {
                    weka.core.Instance currentInst = TmpWell.CreateInstanceForNClasses(InfoClass).instance(0);
                    double predictedClass = ClassificationModel.classifyInstance(currentInst);

                    TmpWell.SetClass(InfoClass.ListBackAssociation[(int)predictedClass]);
                }
            }
            return;
        }
Example #11
File: Classif.cs Project: cyrenaique/HCSA
        /// <summary>
        /// Global classification
        /// </summary>
        /// <param name="NeutralClass">Neutral class</param>
        /// <param name="IdxClassifier">Classifier Index (0:J48), (1:SVM), (2:NN), (3:KNN)</param>
        private void ClassificationGlobal(int NeutralClass, int IdxClassifier)
        {
            cInfoClass InfoClass = cGlobalInfo.CurrentScreening.GetNumberOfClassesBut(NeutralClass);

            if (InfoClass.NumberOfClass <= 1)
            {
                richTextBoxInfoClassif.AppendText("Screening not processed.\n");
                return;
            }

            cExtendedTable TrainingTable = cGlobalInfo.CurrentScreening.ListPlatesActive.GetListActiveWells().GetAverageDescriptorValues(cGlobalInfo.CurrentScreening.ListDescriptors.GetActiveDescriptors(), false, true);

            weka.core.Instances insts = cGlobalInfo.CurrentScreening.CreateInstancesWithClasses(InfoClass, NeutralClass);
            Classifier ClassificationModel = null;

            switch (IdxClassifier)
            {
                case 0: // J48
                    ClassificationModel = new weka.classifiers.trees.J48();
                    weka.classifiers.trees.J48 J48Model = (weka.classifiers.trees.J48)ClassificationModel;
                    J48Model.setMinNumObj((int)cGlobalInfo.OptionsWindow.numericUpDownJ48MinNumObjects.Value);
                    richTextBoxInfoClassif.AppendText("\nC4.5 : " + InfoClass.NumberOfClass + " classes");
                    break;
                case 1: // SVM
                    ClassificationModel = new weka.classifiers.functions.SMO();
                    break;
                case 2: // NN
                    ClassificationModel = new weka.classifiers.functions.MultilayerPerceptron();
                    break;
                case 3: // KNN
                    ClassificationModel = new weka.classifiers.lazy.IBk((int)cGlobalInfo.OptionsWindow.numericUpDownKofKNN.Value);
                    break;
                case 4: // Random Forest
                    ClassificationModel = new weka.classifiers.trees.RandomForest();
                    break;
                default:
                    break;
            }

            weka.core.Instances train = new weka.core.Instances(insts, 0, insts.numInstances());

            ClassificationModel.buildClassifier(train);
            cGlobalInfo.ConsoleWriteLine(ClassificationModel.ToString());

            weka.classifiers.Evaluation evaluation = new weka.classifiers.Evaluation(insts);
            evaluation.crossValidateModel(ClassificationModel, insts, 2, new java.util.Random(1));

            cGlobalInfo.ConsoleWriteLine(evaluation.toSummaryString());
            cGlobalInfo.ConsoleWriteLine(evaluation.toMatrixString());

            // update classification information of the current plate
            string Text = "";
            switch (IdxClassifier)
            {
                case 0: // J48
                    Text = "J48 - ";
                    break;
                case 1: // SVM
                    //  ClassificationModel = new weka.classifiers.functions.SMO();
                    Text = "SVM - ";
                    break;
                case 2: // NN
                    // ClassificationModel = new weka.classifiers.functions.MultilayerPerceptron();
                    Text = "Neural Network - ";
                    break;
                case 3: // KNN
                    // ClassificationModel = new weka.classifiers.lazy.IBk((int)CompleteScreening.GlobalInfo.OptionsWindow.numericUpDownKofKNN.Value);
                    Text = "K-Nearest Neighbor(s) - ";
                    break;
                default:
                    break;
            }
            richTextBoxInfoClassif.AppendText(Text + InfoClass.NumberOfClass + " classes.");

            // CurrentPlateToProcess.GetInfoClassif().StringForQuality = evaluation.toSummaryString();
            //  CurrentPlateToProcess.GetInfoClassif().ConfusionMatrix = evaluation.toMatrixString();
            foreach (cPlate CurrentPlateToProcess in cGlobalInfo.CurrentScreening.ListPlatesActive)
            {
                foreach (cWell TmpWell in CurrentPlateToProcess.ListActiveWells)
                {
                    // return;
                    weka.core.Instance currentInst = TmpWell.CreateInstanceForNClasses(InfoClass).instance(0);
                    double predictedClass = ClassificationModel.classifyInstance(currentInst);
                    double[] ClassConfidence = ClassificationModel.distributionForInstance(currentInst);
                    double ConfidenceValue = ClassConfidence[(int)predictedClass];
                    TmpWell.SetClass(InfoClass.ListBackAssociation[(int)predictedClass], ConfidenceValue);
                }
            }
            return;
        }
Example #12
        private cExtendedTable GenerateArtifactMessage(cExtendedTable TmpTable, cPlate PlateToProcess)
        {
            cLinearize Lin = new cLinearize();
            Lin.SetInputData(TmpTable);
            Lin.Run();
            cExtendedTable LINTable = Lin.GetOutPut();

            cClustering Cluster = new cClustering();
            Cluster.SetInputData(LINTable);
            Cluster.ParamAlgoForClustering = this.ParamAlgoForClustering;
            if (Cluster.Run().IsSucceed == false) return null;

            this.ClusteredTable = Cluster.GetOutPut();

            // now clustering
            //if (!KMeans((int)cGlobalInfo.OptionsWindow.numericUpDownSystErrorIdentKMeansClasses.Value, PlateToProcess, CurrentDescSel))
            //{
            //    List<string> ListMessageError = new List<string>();
            //    ListMessageError.Add("K-Means Error");
            //    return ListMessageError;
            //}

            //// and finally classification

            //    return this.ComputePlateBasedClassification(MinObjectsNumber);

            //}

            cExtendedTable ET = PlateToProcess.ListWells.GetPositionRelatedSignatures();

            ET.Add(this.ClusteredTable[this.ClusteredTable.Count - 1]);

            weka.core.Instances insts = ET.CreateWekaInstancesWithClasses(); //CurrentPlateToProcess.CreateInstancesWithClassesWithPlateBasedDescriptor(Classes);
            weka.classifiers.trees.J48 ClassificationModel = new weka.classifiers.trees.J48();
            ClassificationModel.setMinNumObj(MinObjectsNumber);

            weka.core.Instances train = new weka.core.Instances(insts, 0, insts.numInstances());
            ClassificationModel.buildClassifier(train);

            string DotString = ClassificationModel.graph().Remove(0, ClassificationModel.graph().IndexOf("{") + 2);
            int DotLength = DotString.Length;

            string NewDotString = DotString.Remove(DotLength - 3, 3);

            // display the tree if requested
            //cDisplayTree DT = new cDisplayTree();
            //DT.SetInputData(NewDotString);
            //DT.Run();

            cExtendedTable ToReturn = new cExtendedTable(4, 1, 0);
            ToReturn.ListRowNames = new List<string>();
            ToReturn.ListTags = new List<object>();

            int CurrentPos = 0;
            int NextReturnPos = CurrentPos;
            List<int> ListNodeId = new List<int>();
            string TmpDotString = NewDotString;

            int TmpClass = 0;
            string ErrorString = "";
            int ErrorMessage = 0;

            ToReturn[0].Name = "Edge artifact";   // edge
            ToReturn[1].Name = "Column artifact";   // col
            ToReturn[2].Name = "Row artifact";   // row
            ToReturn[3].Name = "Bowl artifact";   // bowl

            #region build message
            while (NextReturnPos != -1)
            {
                int NextBracket = NewDotString.IndexOf("[");
                string StringToProcess = NewDotString.Remove(NextBracket - 1);
                string StringToProcess1 = StringToProcess.Remove(0, 1);

                if (StringToProcess1.Contains("N") == false)
                {
                    int Id = Convert.ToInt32(StringToProcess1);

                    int LabelPos = NewDotString.IndexOf("label=\"");
                    string LabelString = NewDotString.Remove(0, LabelPos + 7);
                    LabelPos = LabelString.IndexOf("\"");
                    string FinalLabel = LabelString.Remove(LabelPos);

                    // if (TmpClass < Classes)
                    {
                        if ((FinalLabel == "Dist_To_Border") || (FinalLabel == "Col_Pos") || (FinalLabel == "Row_Pos") || (FinalLabel == "Dist_To_Center"))
                        {
                            if ((FinalLabel == "Dist_To_Border") && (!ErrorString.Contains(" an edge effect")) && (!ErrorString.Contains(" a bowl effect")) && (ErrorMessage < 2))
                            {
                                if (TmpClass > 0) ErrorString += " combined with";
                                ErrorString += " an " + cGlobalInfo.ListArtifacts[0];
                                ErrorMessage++;
                                ToReturn[0][0] = 1;
                            }
                            else if ((FinalLabel == "Col_Pos") && (!ErrorString.Contains(" a column artifact")) && (ErrorMessage < 2))
                            {
                                if (TmpClass > 0) ErrorString += " combined with";
                                ErrorString += " a " + cGlobalInfo.ListArtifacts[1];
                                ErrorMessage++;
                                ToReturn[1][0] = 1;

                            }
                            else if ((FinalLabel == "Row_Pos") && (!ErrorString.Contains(" a row artifact")) && (ErrorMessage < 2))
                            {
                                if (TmpClass > 0) ErrorString += " combined with";
                                ErrorString += " a " + cGlobalInfo.ListArtifacts[2];
                                ErrorMessage++;
                                ToReturn[2][0] = 1;

                            }
                            else if ((FinalLabel == "Dist_To_Center") && (!ErrorString.Contains(" a bowl effect")) && (!ErrorString.Contains(" an edge effect")) && (ErrorMessage < 2))
                            {
                                if (TmpClass > 0) ErrorString += " combined with";
                                ErrorString += " a " + cGlobalInfo.ListArtifacts[3];
                                ErrorMessage++;
                                ToReturn[3][0] = 1;

                            }
                            TmpClass++;
                        }
                    }
                }

                NextReturnPos = NewDotString.IndexOf("\n");
                if (NextReturnPos != -1)
                {
                    string TmpString = NewDotString.Remove(0, NextReturnPos + 1);
                    NewDotString = TmpString;
                }
            }

            if (TmpClass == 0)
            {
                string NoError = "No systematic error detected !";
                ToReturn.ListTags.Add(NoError);
                //ToReturn.Add(NoError);
                return ToReturn;
            }

            string FinalString = "You have a systematic error !\nThis is " + ErrorString;

            NewDotString = TmpDotString;
            NextReturnPos = 0;
            while (NextReturnPos != -1)
            {
                int NextBracket = NewDotString.IndexOf("[");
                string StringToProcess = NewDotString.Remove(NextBracket - 1);
                string StringToProcess1 = StringToProcess.Remove(0, 1);

                if (StringToProcess1.Contains("N"))
                {
                    //// this is an edge
                    string stringNodeIdxStart = StringToProcess1.Remove(StringToProcess1.IndexOf("-"));
                    int NodeIdxStart = Convert.ToInt32(stringNodeIdxStart);

                    string stringNodeIdxEnd = StringToProcess1.Remove(0, StringToProcess1.IndexOf("N") + 1);
                    int NodeIdxSEnd = Convert.ToInt32(stringNodeIdxEnd);

                    int LabelPos = NewDotString.IndexOf("label=");
                    LabelPos += 7;

                    string CurrLabelString = NewDotString.Remove(0, LabelPos);
                }
                NextReturnPos = NewDotString.IndexOf("\n");

                if (NextReturnPos != -1)
                {
                    string TmpString = NewDotString.Remove(0, NextReturnPos + 1);
                    NewDotString = TmpString;
                }
            }

            ToReturn.ListTags.Add(FinalString + ".");
            #endregion

            return ToReturn;
        }
Example #13
        public void classifyTest()
        {
            try
            {
                CSV2Arff();
                java.io.FileReader  arrfFile = new java.io.FileReader("D:/final_version/Gesture-Gis-master/GestureGis2/ComparisonFeaturefile.arff");
                weka.core.Instances insts    = new weka.core.Instances(arrfFile);
                //weka.core.Instances insts2 = new weka.core.Instances(new java.io.FileReader("D:/Gesture-Gis-master/GestureGis2/ComparisonFeaturefile.arff"));
                insts.setClassIndex(insts.numAttributes() - 1);

                //int percentSplit = 66;

                weka.classifiers.Classifier cl = new weka.classifiers.trees.J48();
                //Console.WriteLine("Performing " + percentSplit + "% split evaluation.");

                //randomize the order of the instances in the dataset.
                //weka.filters.Filter myRandom = new weka.filters.unsupervised.instance.Randomize();
                //myRandom.setInputFormat(insts);
                //insts = weka.filters.Filter.useFilter(insts, myRandom);
                int count                 = insts.numInstances();
                int trainSize             = count - 1;
                int testSize              = count - trainSize;
                weka.core.Instances train = new weka.core.Instances(insts, 0, trainSize);

                cl.buildClassifier(train);
                //weka.core.Instance current = insts2.instance(0);
                int numCorrect = 0;

                /*for (int i = trainSize; i < insts.numInstances(); i++)
                 * {
                 *  weka.core.Instance currentInst = insts.instance(i);
                 *  double predictedClass = cl.classifyInstance(currentInst);
                 *  if (predictedClass == insts.instance(i).classValue())
                 *      numCorrect++;
                 * }*/
                int index = count - 1;
                weka.core.Instance currentInst    = insts.instance(index);
                double             predictedClass = cl.classifyInstance(currentInst);
                int pre = (int)predictedClass;
                if (predictedClass == insts.instance(index).classValue())
                {
                    numCorrect++;
                }
                //insts.instance(index).classAttribute();
                //insts.attribute(11);
                // extract the class attribute's value list (e.g. "{a,b,c}") from the ARFF header text
                string s = insts.toString();
                s = s.Substring(s.IndexOf("{") + 1);
                s = s.Substring(0, s.IndexOf("}"));
                string[] ae = s.Split(',');

                /*ArrayList arr = new ArrayList();
                 * string path_class = @"D:\final_version\Gesture-Gis-master\GestureGis2\Classfile.txt";
                 * using (StreamReader reader = new StreamReader(path_class))
                 * {
                 *  while (!reader.EndOfStream)
                 *  {
                 *      arr.Add(reader.ReadLine());
                 *  }
                 *  reader.Close();
                 * }*/
                PredictedClassbyWeka = (string)(ae[pre]);
                arrfFile.close();

                //insts.instance(index).attribute(3);

                /*System.Diagnostics.Debug.WriteLine(numCorrect + " out of " + testSize + " correct (" +
                 *         (double)((double)numCorrect / (double)testSize * 100.0) + "%)");
                 * Console.WriteLine(numCorrect + " out of " + testSize + " correct (" +
                 *         (double)((double)numCorrect / (double)testSize * 100.0) + "%)");*/
            }
            catch (java.lang.Exception ex)
            {
                ex.printStackTrace();
            }
        }
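The label lookup at the end of example #13 re-parses insts.toString() to recover the class values, even though the class attribute exposes them directly (as example #1 does). A minimal sketch of that direct lookup (the helper name is illustrative):

        // Sketch: map a predicted class index back to its label via the class attribute,
        // avoiding the string parsing of insts.toString() used above.
        private static string GetPredictedLabel(weka.core.Instances insts, double predictedClass)
        {
            return insts.classAttribute().value((int)predictedClass);
        }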