Code Example #1
        /// <summary>
        /// Turns a Classifier enum value into a weka classifier of the appropriate type. Also sets default options where needed.
        /// </summary>
        /// <param name="myClassifier">The classifier to use.</param>
        /// <returns>A weka compatible classifier</returns>
        private static weka.classifiers.Classifier GetClassifier(Classifier myClassifier)
        {
            switch (myClassifier)
            {
            case Classifier.J48:
                return(new weka.classifiers.trees.J48());

            case Classifier.NaiveBayes:
                return(new weka.classifiers.bayes.NaiveBayes());

            case Classifier.MLP:
                // http://weka.sourceforge.net/doc/weka/classifiers/functions/MultilayerPerceptron.html#setOptions(java.lang.String[])
                weka.classifiers.functions.MultilayerPerceptron cls = new weka.classifiers.functions.MultilayerPerceptron();
                cls.setOptions(new string[] { "-H", "a" });
                return(cls);

            case Classifier.AdaBoost_J48:
                weka.classifiers.meta.AdaBoostM1 cls_ab_j48 = new weka.classifiers.meta.AdaBoostM1();
                cls_ab_j48.setOptions(new string[] { "-W", "weka.classifiers.trees.J48" });
                return(cls_ab_j48);

            case Classifier.AdaBoost_Stump:
                weka.classifiers.meta.AdaBoostM1 cls_ab_stump = new weka.classifiers.meta.AdaBoostM1();
                cls_ab_stump.setOptions(new string[] { "-W", "weka.classifiers.trees.DecisionStump" });
                return(cls_ab_stump);

            default:
                Console.WriteLine("You did not specify a supported classifier type.");
                return(null);
            }
        }
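
A minimal usage sketch, not part of the original project: assuming an ARFF data file and the same IKVM-based Weka bindings used throughout these examples (the file name is illustrative), the returned classifier can be trained and printed like this.

        // Illustrative usage only: load a dataset, obtain a classifier through
        // GetClassifier, train it, and print the resulting model.
        weka.core.Instances insts = new weka.core.Instances(new java.io.FileReader("data.arff")); // hypothetical path
        insts.setClassIndex(insts.numAttributes() - 1);

        weka.classifiers.Classifier cl = GetClassifier(Classifier.MLP);
        if (cl != null)
        {
            cl.buildClassifier(insts);
            Console.WriteLine(cl.ToString());
        }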
Code Example #2
File: ML.cs Project: jny0714mj/BatteryEstimation
        public static string WEKA_GETMLP(weka.core.Instances insts)
        {
            string THEOUTPUT = " ";

            try
            {
                insts.setClassIndex(insts.numAttributes() - 1);
                weka.classifiers.functions.MultilayerPerceptron mlp = new weka.classifiers.functions.MultilayerPerceptron();

                // SETTING PARAMETERS: parse the option string and apply it to the classifier
                mlp.setOptions(weka.core.Utils.splitOptions("-L 0.3 -M 0.2 -N 500 -V 0 -S 0 -E 20 -H 3"));
                mlp.buildClassifier(insts);


                THEOUTPUT = mlp.ToString();
            }
            catch (java.lang.Exception ex)
            {
                ex.printStackTrace();
            }

            return(THEOUTPUT);

        }
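
A hypothetical call of this helper (the enclosing class name ML is inferred from the file name above; the ARFF path is illustrative):

            // Illustrative only: print the textual summary of the trained MLP.
            weka.core.Instances insts = new weka.core.Instances(new java.io.FileReader("battery.arff")); // hypothetical path
            Console.WriteLine(ML.WEKA_GETMLP(insts));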
Code Example #3
        public List <double> testMLPUsingWeka(string[] attributeArray, string[] classNames, double[] dataValues, string classHeader, string defaultclass, string modelName, int hiddelLayers = 7, double learningRate = 0.03, double momentum = 0.4, int decimalPlaces = 2, int trainingTime = 1000)
        {
            java.util.ArrayList classLabel = new java.util.ArrayList();
            foreach (string className in classNames)
            {
                classLabel.Add(className);
            }
            weka.core.Attribute classHeaderName = new weka.core.Attribute(classHeader, classLabel);

            java.util.ArrayList attributeList = new java.util.ArrayList();
            foreach (string attribute in attributeArray)
            {
                weka.core.Attribute newAttribute = new weka.core.Attribute(attribute);
                attributeList.Add(newAttribute);
            }
            attributeList.Add(classHeaderName);
            weka.core.Instances data = new weka.core.Instances("TestInstances", attributeList, 0);
            data.setClassIndex(data.numAttributes() - 1);
            // Set instance's values for the attributes
            weka.core.Instance inst_co = new weka.core.DenseInstance(data.numAttributes());
            for (int i = 0; i < data.numAttributes() - 1; i++)
            {
                inst_co.setValue(i, dataValues.ElementAt(i));
            }

            inst_co.setValue(classHeaderName, defaultclass);
            data.add(inst_co);

            java.io.File path = new java.io.File("/models/");
            weka.classifiers.functions.MultilayerPerceptron clRead = loadModel(modelName, path);
            clRead.setHiddenLayers(hiddelLayers.ToString());
            clRead.setLearningRate(learningRate);
            clRead.setMomentum(momentum);
            clRead.setNumDecimalPlaces(decimalPlaces);
            clRead.setTrainingTime(trainingTime);
            weka.filters.Filter myRandom = new weka.filters.unsupervised.instance.Randomize();
            myRandom.setInputFormat(data);
            data = weka.filters.Filter.useFilter(data, myRandom);
            double classValue = clRead.classifyInstance(data.get(0));

            double[]      predictionDistribution  = clRead.distributionForInstance(data.get(0));
            List <double> predictionDistributions = new List <double>();

            for (int predictionDistributionIndex = 0;
                 predictionDistributionIndex < predictionDistribution.Length;
                 predictionDistributionIndex++)
            {
                double prob = predictionDistribution[predictionDistributionIndex] * 100;
                predictionDistributions.Add(prob);
            }
            List <double> prediction = new List <double>();

            prediction.Add(classValue);
            prediction.AddRange(predictionDistributions);
            return(prediction);
        }
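
A hypothetical call of testMLPUsingWeka (feature names, class labels, values and the model name are all illustrative, and a model previously saved under that name, e.g. by the training method in Code Example #6, is assumed to exist). The first element of the returned list is the predicted class index; the remaining elements are the per-class probabilities in percent.

            // Illustrative only: classify one instance with a previously saved MLP model.
            string[] attributes = { "f1", "f2", "f3" };           // hypothetical feature names
            string[] classes    = { "happy", "sad", "neutral" };  // hypothetical class labels
            double[] values     = { 0.4, 1.2, 0.7 };              // hypothetical feature values

            List<double> result = testMLPUsingWeka(attributes, classes, values, "emotion", "neutral", "MLP");
            int predictedIndex  = (int)result[0];
            Console.WriteLine("Predicted: " + classes[predictedIndex] + " (" + result[predictedIndex + 1] + "%)");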
Code Example #4
File: WekaClassification.cs Project: skn123/iFourmi
        public static Classifier GetWekaClassifier(string algorithm, string trainingSetPath)
        {
            Classifier classifier = null;

            // Instantiate the requested Weka classifier.
            switch (algorithm)
            {
            case "KNN":
                classifier = new weka.classifiers.lazy.IB1();
                break;

            case "NBayes":
                classifier = new weka.classifiers.bayes.NaiveBayes();
                break;

            case "JRip":
                classifier = new weka.classifiers.rules.JRip();
                break;

            case "J48":
                classifier = new weka.classifiers.trees.J48();
                break;

            case "NeuralNets":
                classifier = new weka.classifiers.functions.MultilayerPerceptron();
                break;

            case "SVM":
                classifier = new weka.classifiers.functions.SMO();
                break;

            default:
                // Unknown algorithm name: nothing to train.
                return(classifier);
            }

            // Train the selected classifier on the given training set.
            WekaNETBridge.WekaClassification wekaClassification = new WekaNETBridge.WekaClassification(trainingSetPath, classifier);
            classifier = wekaClassification.CreateClassifier();

            return(classifier);
        }
Code Example #5
        public static void CalculateSuccessForAnn(weka.core.Instances originalInsts)
        {
            try
            {
                var form = Form.ActiveForm as Form1;

                form.successPrcAnn.Text = "Training...";
                form.successRtAnn.Text  = "../" + testSize;

                weka.core.Instances insts = originalInsts;

                // Pre-process
                insts = ConvertNominalToNumeric(insts);
                insts = Normalize(insts);

                // Classify
                weka.classifiers.Classifier cl    = new weka.classifiers.functions.MultilayerPerceptron();
                weka.core.Instances         train = new weka.core.Instances(insts, 0, trainSize);
                cl.buildClassifier(train);

                int    numCorrect = 0;
                double percentage = 0;
                for (int i = trainSize; i < insts.numInstances(); i++)
                {
                    weka.core.Instance currentInst    = insts.instance(i);
                    double             predictedClass = cl.classifyInstance(currentInst);
                    if (predictedClass == insts.instance(i).classValue())
                    {
                        numCorrect++;
                    }

                    percentage              = (double)numCorrect / (double)testSize * 100.0;
                    form.successRtAnn.Text  = numCorrect + "/" + testSize;
                    form.successPrcAnn.Text = String.Format("{0:0.00}", percentage) + "%";
                }
                succesRates.Add(Classifier.ANN, percentage);
                classifiers.Add(Classifier.ANN, cl);
            }
            catch (java.lang.Exception ex)
            {
                ex.printStackTrace();
                MessageBox.Show(ex.ToString(), "Error for Neural Network", MessageBoxButtons.OK, MessageBoxIcon.Error);
            }
            catch (Exception)
            {
                MessageBox.Show("Error for  Neural Network", "Error for  Neural Network", MessageBoxButtons.OK, MessageBoxIcon.Error);
            }
        }
Code Example #6
        public void trainMachineForEmotionUsingWeka(string wekaFile, string modelName, int hiddelLayers = 7, double learningRate = 0.03, double momentum = 0.4, int decimalPlaces = 2, int trainingTime = 1000)
        {
            //"C:\\Users\\Gulraiz\\Desktop\\Genereted2.arff" "MLP"
            try
            {
                weka.core.Instances insts = new weka.core.Instances(new java.io.FileReader(wekaFile));
                insts.setClassIndex(insts.numAttributes() - 1);
                weka.classifiers.functions.MultilayerPerceptron cl;
                cl = new weka.classifiers.functions.MultilayerPerceptron();
                cl.setHiddenLayers(hiddelLayers.ToString());
                cl.setLearningRate(learningRate);
                cl.setMomentum(momentum);
                cl.setNumDecimalPlaces(decimalPlaces);
                cl.setTrainingTime(trainingTime);

                System.Console.WriteLine("Performing " + percentSplit + "% split evaluation.");

                //randomize the order of the instances in the dataset.
                weka.filters.Filter myRandom = new weka.filters.unsupervised.instance.Randomize();
                myRandom.setInputFormat(insts);
                insts = weka.filters.Filter.useFilter(insts, myRandom);

                int trainSize             = insts.numInstances() * percentSplit / 100;
                int testSize              = insts.numInstances() - trainSize;
                weka.core.Instances train = new weka.core.Instances(insts, 0, trainSize);
                java.io.File        path  = new java.io.File("/models/");
                cl.buildClassifier(train);
                saveModel(cl, modelName, path);
                #region test whole set
                //int numCorrect = 0;
                //for (int i = trainSize; i < insts.numInstances(); i++)
                //{
                //    weka.core.Instance currentInst = insts.instance(i);
                //    double predictedClass = cl.classifyInstance(currentInst);
                //    if (predictedClass == insts.instance(i).classValue())
                //        numCorrect++;
                //}

                //System.Console.WriteLine(numCorrect + " out of " + testSize + " correct (" +
                //           (double)((double)numCorrect / (double)testSize * 100.0) + "%)");
                #endregion
            }
            catch (java.lang.Exception ex)
            {
                ex.printStackTrace();
            }
        }
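
This method is the training counterpart of Code Example #3: it saves the MultilayerPerceptron under modelName via the project's saveModel helper, and testMLPUsingWeka later reloads it with loadModel. A minimal call mirroring the commented-out sample arguments at the top of the method:

            // Illustrative only: train on the sample ARFF file mentioned above and
            // store the model as "MLP", using the default MLP parameters.
            trainMachineForEmotionUsingWeka("C:\\Users\\Gulraiz\\Desktop\\Genereted2.arff", "MLP");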
Code Example #7
        public static string classifyTest(string file, string classifier)
        {
            string data = "No data";

            try
            {
                //weka.core.Instances insts = new weka.core.Instances(new java.io.FileReader("C:\\Users\\kinli\\source\\repos\\WebApplication2\\WebApplication2\\iris.arff"));
                weka.core.Instances insts = new weka.core.Instances(new java.io.FileReader(file));
                insts.setClassIndex(insts.numAttributes() - 1);
                weka.classifiers.Classifier cl = new weka.classifiers.trees.J48();

                if (classifier == "J48")
                {
                    cl = new weka.classifiers.trees.J48();
                }
                else if (classifier == "MLP")
                {
                    cl = new weka.classifiers.functions.MultilayerPerceptron();
                }
                else if (classifier == "NaiveBayes")
                {
                    cl = new weka.classifiers.bayes.NaiveBayes();
                }

                //data = ("Performing " + percentSplit + "% split evaluation.\n");
                data = ("Performing use training set evaluation.\n");
                //randomize the order of the instances in the dataset.

                weka.filters.Filter myRandom = new weka.filters.unsupervised.instance.Randomize();
                myRandom.setInputFormat(insts);
                insts = weka.filters.Filter.useFilter(insts, myRandom);

                /*
                 * int trainSize = insts.numInstances() * percentSplit / 100;
                 * int testSize = insts.numInstances() - trainSize;
                 * weka.core.Instances train = new weka.core.Instances(insts, 0, trainSize);
                 *
                 * cl.buildClassifier(train);
                 * int numCorrect = 0;
                 * for (int i = trainSize; i < insts.numInstances(); i++)
                 * {
                 *  weka.core.Instance currentInst = insts.instance(i);
                 *  double predictedClass = cl.classifyInstance(currentInst);
                 *  if (predictedClass == insts.instance(i).classValue())
                 *      numCorrect++;
                 * }*/
                cl.buildClassifier(insts);

                int numCorrect = 0;
                for (int i = 0; i < insts.numInstances(); i++)
                {
                    weka.core.Instance currentInst    = insts.instance(i);
                    double             predictedClass = cl.classifyInstance(currentInst);
                    if (predictedClass == insts.instance(i).classValue())
                    {
                        numCorrect++;
                    }
                }
                data = data + (numCorrect + " out of " + insts.numInstances() + " correct (" +
                               (double)((double)numCorrect / (double)insts.numInstances() * 100.0) + "%)");
            }
            catch (java.lang.Exception ex)
            {
                data = "Error";
                ex.printStackTrace();
            }
            return(data);
        }
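
A hypothetical call (the ARFF path is illustrative, echoing the commented-out iris.arff example above). Note that this method trains and evaluates on the same data, so the reported accuracy is optimistic.

            // Illustrative only: train an MLP on iris.arff and print the training-set accuracy report.
            string report = classifyTest("iris.arff", "MLP");
            Console.WriteLine(report);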
Code Example #8
    // Test the classification result of each map that a user played,
    // with the data available as if they were playing through it
    public static void classifyTest(String dataString, String playerID)
    {
        String results = "";

        try {
            java.io.StringReader   stringReader = new java.io.StringReader(dataString);
            java.io.BufferedReader buffReader   = new java.io.BufferedReader(stringReader);

            /* NOTE THAT FOR NAIVE BAYES ALL WEIGHTS CAN BE = 1*/
            //weka.core.converters.ConverterUtils.DataSource source = new weka.core.converters.ConverterUtils.DataSource("iris.arff");
            weka.core.Instances data = new weka.core.Instances(buffReader);             //source.getDataSet();
            // setting class attribute if the data format does not provide this information
            // For example, the XRFF format saves the class attribute information as well
            if (data.classIndex() == -1)
            {
                data.setClassIndex(data.numAttributes() - 1);
            }

            weka.classifiers.Classifier cl;
            for (int i = 2; i <= data.numInstances(); i++)
            {
                //cl = new weka.classifiers.bayes.NaiveBayes();
                //cl = new weka.classifiers.trees.J48();
                //cl = new weka.classifiers.lazy.IB1();
                cl = new weka.classifiers.functions.MultilayerPerceptron();
                ((weka.classifiers.functions.MultilayerPerceptron)cl).setHiddenLayers("12");
                //cl = new weka.classifiers.trees.RandomForest();

                weka.core.Instances subset = new weka.core.Instances(data, 0, i);
                cl.buildClassifier(subset);

                weka.classifiers.Evaluation eval = new weka.classifiers.Evaluation(subset);
                eval.crossValidateModel(cl, subset, subset.numInstances(), new java.util.Random(1));
                results = results + eval.pctCorrect();                 // For accuracy measurement
                /* For Mathews Correlation Coefficient */
                //double TP = eval.numTruePositives(1);
                //double FP = eval.numFalsePositives(1);
                //double TN = eval.numTrueNegatives(1);
                //double FN = eval.numFalseNegatives(1);
                //double correlationCoeff = ((TP*TN)-(FP*FN))/Math.Sqrt((TP+FP)*(TP+FN)*(TN+FP)*(TN+FN));
                //results = results + correlationCoeff;
                if (i != data.numInstances())
                {
                    results = results + ", ";
                }
                if (i == data.numInstances())
                {
                    Debug.Log("Player: " + playerID + ", Num Maps: " + data.numInstances() + ", AUC: " + eval.areaUnderROC(1));
                }
            }
        } catch (java.lang.Exception ex)
        {
            Debug.LogError(ex.getMessage());
        }
        // Write values to file for a matlab read
        // For accuracy
        //StreamWriter writer = new StreamWriter("DataForMatlab/"+playerID+"_CrossFoldValidations_RandomForest.txt");
        StreamWriter writer = new StreamWriter("DataForMatlab/" + playerID + "_LOOCrossFold_NeuralNet.txt");

        //StreamWriter writer = new StreamWriter("DataForMatlab/"+playerID+"_CrossFoldCorrCoeff.txt"); // For mathews cc
        writer.WriteLine(results);
        writer.Close();
        Debug.Log(playerID + " has been written to file");
    }
Code Example #9
File: Classif.cs Project: cyrenaique/HCSA
        /// <summary>
        /// Plate by plate classification
        /// </summary>
        /// <param name="NeutralClass">Neutral class</param>
        /// <param name="IdxClassifier">Classifier Index (0:J48), (1:SVM), (2:NN), (3:KNN)</param>
        private void ClassificationPlateByPlate(int NeutralClass, int IdxClassifier)
        {
            int NumberOfPlates = cGlobalInfo.CurrentScreening.ListPlatesActive.Count;

            for (int PlateIdx = 0; PlateIdx < NumberOfPlates; PlateIdx++)
            {
                cPlate CurrentPlateToProcess = cGlobalInfo.CurrentScreening.ListPlatesActive.GetPlate(cGlobalInfo.CurrentScreening.ListPlatesActive[PlateIdx].GetName());
                cInfoClass InfoClass = CurrentPlateToProcess.GetNumberOfClassesBut(NeutralClass);
                // return;
                if (InfoClass.NumberOfClass <= 1)
                {
                    richTextBoxInfoClassif.AppendText(CurrentPlateToProcess.GetName() + " not processed.\n");
                    continue;
                }

                weka.core.Instances insts = CurrentPlateToProcess.CreateInstancesWithClasses(InfoClass, NeutralClass);
                Classifier ClassificationModel = null;
                string Text = "";
                switch (IdxClassifier)
                {
                    case 0: // J48
                        ClassificationModel = new weka.classifiers.trees.J48();
                        weka.classifiers.trees.J48 J48Model = (weka.classifiers.trees.J48)ClassificationModel;
                        J48Model.setMinNumObj((int)cGlobalInfo.OptionsWindow.numericUpDownJ48MinNumObjects.Value);
                        Text = "J48 - ";
                        break;
                    case 1: // SVM
                        ClassificationModel = new weka.classifiers.functions.SMO();
                        Text = "SVM - ";
                        break;
                    case 2: // NN
                        ClassificationModel = new weka.classifiers.functions.MultilayerPerceptron();
                        Text = "Neural Network - ";
                        break;
                    case 3: // KNN
                        ClassificationModel = new weka.classifiers.lazy.IBk((int)cGlobalInfo.OptionsWindow.numericUpDownKofKNN.Value);
                        Text = "K-Nearest Neighbor(s) - ";
                        break;
                    case 4: // Random Forest
                        ClassificationModel = new weka.classifiers.trees.RandomForest();
                        Text = "Random Forest - ";

                        break;
                    default:
                        break;
                }
                richTextBoxInfoClassif.AppendText(Text + InfoClass.NumberOfClass + " classes - Plate: ");

                richTextBoxInfoClassif.AppendText(CurrentPlateToProcess.GetName() + " OK \n");
                weka.core.Instances train = new weka.core.Instances(insts, 0, insts.numInstances());

                ClassificationModel.buildClassifier(train);
                cGlobalInfo.ConsoleWriteLine(ClassificationModel.ToString());

                weka.classifiers.Evaluation evaluation = new weka.classifiers.Evaluation(insts);
                evaluation.crossValidateModel(ClassificationModel, insts, 2, new java.util.Random(1));

                cGlobalInfo.ConsoleWriteLine(evaluation.toSummaryString());
                cGlobalInfo.ConsoleWriteLine(evaluation.toMatrixString());

                // update classification information of the current plate
                switch (IdxClassifier)
                {
                    case 0: // J48
                        weka.classifiers.trees.J48 CurrentClassifier = (weka.classifiers.trees.J48)(ClassificationModel);
                        CurrentPlateToProcess.GetInfoClassif().StringForTree = CurrentClassifier.graph().Remove(0, CurrentClassifier.graph().IndexOf("{") + 2);
                        break;
                    /*case 1: // SVM

                        break;
                    case 2: // NN

                        break;
                    case 3: // KNN

                        break;*/
                    default:
                        break;
                }

                CurrentPlateToProcess.GetInfoClassif().StringForQuality = evaluation.toSummaryString();
                CurrentPlateToProcess.GetInfoClassif().ConfusionMatrix = evaluation.toMatrixString();

                foreach (cWell TmpWell in CurrentPlateToProcess.ListActiveWells)
                {
                    weka.core.Instance currentInst = TmpWell.CreateInstanceForNClasses(InfoClass).instance(0);
                    double predictedClass = ClassificationModel.classifyInstance(currentInst);

                    TmpWell.SetClass(InfoClass.ListBackAssociation[(int)predictedClass]);
                }
            }
            return;
        }
Code Example #10
File: Classif.cs Project: cyrenaique/HCSA
        /// <summary>
        /// Global classification
        /// </summary>
        /// <param name="NeutralClass">Neutral class</param>
        /// <param name="IdxClassifier">Classifier Index (0:J48), (1:SVM), (2:NN), (3:KNN)</param>
        private void ClassificationGlobal(int NeutralClass, int IdxClassifier)
        {
            cInfoClass InfoClass = cGlobalInfo.CurrentScreening.GetNumberOfClassesBut(NeutralClass);

            if (InfoClass.NumberOfClass <= 1)
            {
                richTextBoxInfoClassif.AppendText("Screening not processed.\n");
                return;
            }

            cExtendedTable TrainingTable = cGlobalInfo.CurrentScreening.ListPlatesActive.GetListActiveWells().GetAverageDescriptorValues(cGlobalInfo.CurrentScreening.ListDescriptors.GetActiveDescriptors(), false, true);

            weka.core.Instances insts = cGlobalInfo.CurrentScreening.CreateInstancesWithClasses(InfoClass, NeutralClass);
            Classifier ClassificationModel = null;

            switch (IdxClassifier)
            {
                case 0: // J48
                    ClassificationModel = new weka.classifiers.trees.J48();
                    weka.classifiers.trees.J48 J48Model = (weka.classifiers.trees.J48)ClassificationModel;
                    J48Model.setMinNumObj((int)cGlobalInfo.OptionsWindow.numericUpDownJ48MinNumObjects.Value);
                    richTextBoxInfoClassif.AppendText("\nC4.5 : " + InfoClass.NumberOfClass + " classes");
                    break;
                case 1: // SVM
                    ClassificationModel = new weka.classifiers.functions.SMO();
                    break;
                case 2: // NN
                    ClassificationModel = new weka.classifiers.functions.MultilayerPerceptron();
                    break;
                case 3: // KNN
                    ClassificationModel = new weka.classifiers.lazy.IBk((int)cGlobalInfo.OptionsWindow.numericUpDownKofKNN.Value);
                    break;
                case 4: // Random Forest
                    ClassificationModel = new weka.classifiers.trees.RandomForest();
                    break;
                default:
                    break;
            }

            weka.core.Instances train = new weka.core.Instances(insts, 0, insts.numInstances());

            ClassificationModel.buildClassifier(train);
            cGlobalInfo.ConsoleWriteLine(ClassificationModel.ToString());

            weka.classifiers.Evaluation evaluation = new weka.classifiers.Evaluation(insts);
            evaluation.crossValidateModel(ClassificationModel, insts, 2, new java.util.Random(1));

            cGlobalInfo.ConsoleWriteLine(evaluation.toSummaryString());
            cGlobalInfo.ConsoleWriteLine(evaluation.toMatrixString());

            // update classification information of the current plate
            string Text = "";
            switch (IdxClassifier)
            {
                case 0: // J48
                    Text = "J48 - ";
                    break;
                case 1: // SVM
                    //  ClassificationModel = new weka.classifiers.functions.SMO();
                    Text = "SVM - ";
                    break;
                case 2: // NN
                    // ClassificationModel = new weka.classifiers.functions.MultilayerPerceptron();
                    Text = "Neural Network - ";
                    break;
                case 3: // KNN
                    // ClassificationModel = new weka.classifiers.lazy.IBk((int)CompleteScreening.GlobalInfo.OptionsWindow.numericUpDownKofKNN.Value);
                    Text = "K-Nearest Neighbor(s) - ";
                    break;
                default:
                    break;
            }
            richTextBoxInfoClassif.AppendText(Text + InfoClass.NumberOfClass + " classes.");

            // CurrentPlateToProcess.GetInfoClassif().StringForQuality = evaluation.toSummaryString();
            //  CurrentPlateToProcess.GetInfoClassif().ConfusionMatrix = evaluation.toMatrixString();
            foreach (cPlate CurrentPlateToProcess in cGlobalInfo.CurrentScreening.ListPlatesActive)
            {
                foreach (cWell TmpWell in CurrentPlateToProcess.ListActiveWells)
                {
                    // return;
                    weka.core.Instance currentInst = TmpWell.CreateInstanceForNClasses(InfoClass).instance(0);
                    double predictedClass = ClassificationModel.classifyInstance(currentInst);
                    double[] ClassConfidence = ClassificationModel.distributionForInstance(currentInst);
                    double ConfidenceValue = ClassConfidence[(int)predictedClass];
                    TmpWell.SetClass(InfoClass.ListBackAssociation[(int)predictedClass], ConfidenceValue);
                }
            }
            return;
        }