// Example #1
// 0
        /// <summary>
        /// Creates and trains a Weka classifier for the given algorithm name.
        /// </summary>
        /// <param name="algorithm">One of "KNN", "NBayes", "JRip", "J48", "NeuralNets", "SVM".</param>
        /// <param name="trainingSetPath">Training dataset path handed to WekaNETBridge.WekaClassification.</param>
        /// <returns>The trained classifier, or null when the algorithm name is not recognized.</returns>
        public static Classifier GetWekaClassifier(string algorithm, string trainingSetPath)
        {
            Classifier classifier = null;

            // Map the algorithm name to an untrained classifier instance.
            switch (algorithm)
            {
            case "KNN":
                classifier = new weka.classifiers.lazy.IB1();
                break;

            case "NBayes":
                classifier = new weka.classifiers.bayes.NaiveBayes();
                break;

            case "JRip":
                classifier = new weka.classifiers.rules.JRip();
                break;

            case "J48":
                classifier = new weka.classifiers.trees.J48();
                break;

            case "NeuralNets":
                classifier = new weka.classifiers.functions.MultilayerPerceptron();
                break;

            case "SVM":
                classifier = new weka.classifiers.functions.SMO();
                break;
            }

            // The training step was duplicated verbatim in every case above;
            // it is identical for all algorithms, so run it once here.
            if (classifier != null)
            {
                WekaNETBridge.WekaClassification wekaClassification = new WekaNETBridge.WekaClassification(trainingSetPath, classifier);
                classifier = wekaClassification.CreateClassifier();
            }

            return classifier;
        }
// Example #2
// 0
        /// <summary>
        /// Trains an SMO classifier on the first trainSize instances and streams the
        /// running success rate over the remaining instances into the active Form1's
        /// labels, finally recording the rate and the model in the shared dictionaries.
        /// </summary>
        /// <param name="originalInsts">Full dataset; split by the class-level trainSize/testSize fields.</param>
        public static void CalculateSuccessForSvm(weka.core.Instances originalInsts)
        {
            try
            {
                // NOTE(review): Form.ActiveForm may be null or another form type — an NRE
                // here is swallowed by the generic catch below; confirm this is intended.
                var form = Form.ActiveForm as Form1;
                form.successPrcSvm.Text = "Training...";
                form.successRtSvm.Text  = "../" + testSize;

                // Pre-process: nominal -> numeric conversion, then normalization.
                weka.core.Instances insts = Normalize(ConvertNominalToNumeric(originalInsts));

                // Train SMO on the leading trainSize instances.
                weka.classifiers.Classifier cl = new weka.classifiers.functions.SMO();
                cl.buildClassifier(new weka.core.Instances(insts, 0, trainSize));

                int    correct     = 0;
                double successRate = 0;
                for (int idx = trainSize; idx < insts.numInstances(); idx++)
                {
                    weka.core.Instance sample = insts.instance(idx);
                    if (cl.classifyInstance(sample) == sample.classValue())
                    {
                        correct++;
                    }

                    // Update the UI after every evaluated instance.
                    successRate             = (double)correct / (double)testSize * 100.0;
                    form.successRtSvm.Text  = correct + "/" + testSize;
                    form.successPrcSvm.Text = String.Format("{0:0.00}", successRate) + "%";
                }

                succesRates.Add(Classifier.SVM, successRate);
                classifiers.Add(Classifier.SVM, cl);
            }
            catch (java.lang.Exception ex)
            {
                ex.printStackTrace();
                MessageBox.Show(ex.ToString(), "Error for SVM", MessageBoxButtons.OK, MessageBoxIcon.Error);
            }
            catch (Exception)
            {
                MessageBox.Show("Error for  SVM", "Error for SVM", MessageBoxButtons.OK, MessageBoxIcon.Error);
            }
        }
        /// <summary>
        /// Filtered Support Vector Machine Classification with type specified. i.e. BOF or BOW.
        /// Loads a cached model for the selected text filter (or trains and caches one),
        /// classifies every instance of the test dataset, and publishes the labels.
        /// </summary>
        /// <param name="type">"BOF" selects the BOF dataset; anything else selects the BOW dataset.</param>
        /// <param name="trainingFilePath">Optional explicit training .arff path; overrides the default when non-null.</param>
        /// <param name="directoryName">Unused here; kept for call-site compatibility.</param>
        /// <param name="textFilterType">Which pre-processing variant's cached model to use.</param>
        private void FilteredSVM(string type, string trainingFilePath, string directoryName, TextFilterType textFilterType)
        {
            var currDir = System.Environment.GetFolderPath(System.Environment.SpecialFolder.ApplicationData);

            // Combine the base folder with the application-specific folder.
            string specificFolder = System.IO.Path.Combine(currDir, "MARC 2.0");

            // Check if folder exists and if not, create it.
            if (!Directory.Exists(specificFolder))
            {
                Directory.CreateDirectory(specificFolder);
            }

            try
            {
                // Default training dataset by type; an explicit path overrides it below.
                var trainingDatatsetFilePath = type == "BOF"
                    ? specificFolder + "\\InputData\\TrainingDatasets\\BOF Dataset.arff"
                    : specificFolder + "\\InputData\\TrainingDatasets\\BOW Dataset.arff";

                var testDatasetFilePath = specificFolder + "\\InputData\\TrainingDatasets\\Test.arff";

                // If training file path is supplied then use it.
                if (trainingFilePath != null)
                {
                    trainingDatatsetFilePath = trainingFilePath;
                }

                java.io.BufferedReader trainReader    = new BufferedReader(new FileReader(trainingDatatsetFilePath)); //File with text examples
                BufferedReader         classifyReader = new BufferedReader(new FileReader(testDatasetFilePath));      //File with text to classify

                Instances trainInsts    = new Instances(trainReader);
                Instances classifyInsts = new Instances(classifyReader);

                // Close the readers once loaded (they were previously leaked).
                trainReader.close();
                classifyReader.close();

                trainInsts.setClassIndex(trainInsts.numAttributes() - 1);
                classifyInsts.setClassIndex(classifyInsts.numAttributes() - 1);

                FilteredClassifier model = new FilteredClassifier();

                // BUG FIX: the configured filter (TF transform enabled) was previously
                // discarded — setFilter() received a fresh, unconfigured instance.
                StringToWordVector stringtowordvector = new StringToWordVector();
                stringtowordvector.setTFTransform(true);
                model.setFilter(stringtowordvector);

                weka.classifiers.Classifier smocls = new weka.classifiers.functions.SMO();
                smocls.setOptions(weka.core.Utils.splitOptions("-C 1.0 -L 0.0010 -P 1.0E-12 -N 0 -V -1 -W 1 -K \"weka.classifiers.functions.supportVector.PolyKernel -C 250007 -E 1.0\""));
                model.setClassifier(smocls);

                var directoryRoot = specificFolder;

                // Build (and cache) or load the model for the selected filter variant.
                // The four branches differ only in the exists-check and the cache paths.
                switch (textFilterType)
                {
                case TextFilterType.NoFilter:
                    model = BuildOrLoadSvmModel(SVMNoFilterCheckifModelExists(trainingDatatsetFilePath), model, trainInsts, trainingDatatsetFilePath,
                                                directoryRoot + @"\Model\SVM\SVMNoFilterModel.dat",
                                                directoryRoot + @"\Model\SVM\\SVMNoFilterFile.dat");
                    break;

                case TextFilterType.StopwordsRemoval:
                    model = BuildOrLoadSvmModel(SVMSWRCheckifModelExists(trainingDatatsetFilePath), model, trainInsts, trainingDatatsetFilePath,
                                                directoryRoot + @"\Model\SVM\SVMSWRFilterModel.dat",
                                                directoryRoot + @"\Model\SVM\\SVMSWRFile.dat");
                    break;

                case TextFilterType.Stemming:
                    model = BuildOrLoadSvmModel(SVMSTCheckifModelExists(trainingDatatsetFilePath), model, trainInsts, trainingDatatsetFilePath,
                                                directoryRoot + @"\Model\SVM\SVMSTFilterModel.dat",
                                                directoryRoot + @"\Model\SVM\\SVMSTFile.dat");
                    break;

                case TextFilterType.StopwordsRemovalStemming:
                    model = BuildOrLoadSvmModel(SVMSWRSTCheckifModelExists(trainingDatatsetFilePath), model, trainInsts, trainingDatatsetFilePath,
                                                directoryRoot + @"\Model\SVM\SVMSWRSTFilterModel.dat",
                                                directoryRoot + @"\Model\SVM\\SVMSWRSTFile.dat");
                    break;

                default:
                    break;
                }

                // Classify every test instance and record the human-readable label.
                for (int i = 0; i < classifyInsts.numInstances(); i++)
                {
                    classifyInsts.instance(i).setClassMissing();
                    double cls = model.classifyInstance(classifyInsts.instance(i));
                    classifyInsts.instance(i).setClassValue(cls);
                    classification = cls == 0 ? "Bug Report"
                                    : cls == 1 ? "Feature Request"
                                    : "Other";
                    tempAllClassification.Add(classification);
                }
                AllClassification = tempAllClassification;
            }
            catch (Exception o)
            {
                error = o.ToString();
            }
        }

        /// <summary>
        /// Returns a ready-to-use classifier: loads the cached model when it exists;
        /// otherwise trains <paramref name="model"/>, caches it at
        /// <paramref name="modelPath"/>, and archives a copy of the training dataset.
        /// </summary>
        private FilteredClassifier BuildOrLoadSvmModel(bool modelExists, FilteredClassifier model, Instances trainInsts,
                                                       string trainingDatasetPath, string modelPath, string trainingCopyPath)
        {
            if (modelExists)
            {
                // Cached model found — reuse it.
                return Helper.Helper.ReadFromBinaryFile <FilteredClassifier>(modelPath);
            }

            // Train, persist the model, and archive the training file it was built from.
            model.buildClassifier(trainInsts);
            Helper.Helper.WriteToBinaryFile <FilteredClassifier>(modelPath, model);
            string content = System.IO.File.ReadAllText(trainingDatasetPath);
            using (var sW = new StreamWriter(trainingCopyPath))
            {
                sW.Write(content);
            }
            return model;
        }
        /// <summary>
        /// Lets the user pick an .arff file, trains an SMO classifier on a percentSplit
        /// split, shows split-evaluation counts in labels 6-8, and signals the result
        /// over a serial port (writes "1" when accuracy &lt;= 75%, then "a").
        /// </summary>
        private void button1_Click(object sender, EventArgs e)
        {
            string         fname  = "";
            OpenFileDialog dialog = new OpenFileDialog();

            dialog.Filter =
                "Weka Files (*.arff)|*.arff|All files (*.*)|*.*";
            dialog.InitialDirectory = Application.StartupPath;
            dialog.Title            = "Select a .arff file";
            if (dialog.ShowDialog() == DialogResult.OK)
            {
                fname = dialog.FileName;
            }
            if (fname == "")
            {
                // User cancelled the dialog.
                return;
            }
            try
            {
                weka.core.Instances insts = new weka.core.Instances(new java.io.FileReader(fname));
                insts.setClassIndex(insts.numAttributes() - 1);

                Classifier cl = new weka.classifiers.functions.SMO();

                // Randomize the order of the instances in the dataset.
                weka.filters.Filter myRandom = new weka.filters.unsupervised.instance.Randomize();
                myRandom.setInputFormat(insts);
                insts = weka.filters.Filter.useFilter(insts, myRandom);

                int trainSize             = insts.numInstances() * percentSplit / 100;
                int testSize              = insts.numInstances() - trainSize;
                weka.core.Instances train = new weka.core.Instances(insts, 0, trainSize);

                cl.buildClassifier(train);

                // Score the held-out tail of the shuffled dataset.
                int numCorrect = 0;
                for (int i = trainSize; i < insts.numInstances(); i++)
                {
                    weka.core.Instance currentInst    = insts.instance(i);
                    double             predictedClass = cl.classifyInstance(currentInst);
                    if (predictedClass == insts.instance(i).classValue())
                    {
                        numCorrect++;
                    }
                }

                label6.Text = testSize.ToString();
                label7.Text = numCorrect.ToString();
                label8.Text = (double)((double)numCorrect / (double)testSize * 100.0) + "%";
                double result_perc = Math.Truncate((double)((double)numCorrect / (double)testSize * 100.0));

                try
                {
                    // RESOURCE FIX: 'using' guarantees the port is closed/disposed even
                    // when Open or WriteLine throws (it previously leaked on error).
                    using (SerialPort serialPort = new SerialPort("COM" + textBox1.Text + "", Int32.Parse(textBox2.Text), Parity.None, 8))
                    {
                        serialPort.Open();

                        if (result_perc <= 75)
                        {
                            serialPort.WriteLine("1");
                        }

                        serialPort.WriteLine("a");

                        serialPort.Close();
                    }
                }
                catch (Exception ex)
                {
                    MessageBox.Show(ex.Message);
                }
            }
            catch (java.lang.Exception ex)
            {
                MessageBox.Show(ex.getMessage().ToString(), "");
            }
        }
// Example #5
// 0
        /// <summary>
        /// Plate by plate classification: for each active plate, trains the selected
        /// classifier on that plate's own instances, logs 2-fold cross-validation
        /// results, and assigns a predicted class to every active well of the plate.
        /// </summary>
        /// <param name="NeutralClass">Neutral class (excluded when counting/collecting classes)</param>
        /// <param name="IdxClassifier">Classifier Index (0:J48), (1:SVM), (2:NN), (3:KNN), (4:Random Forest)</param>
        private void ClassificationPlateByPlate(int NeutralClass, int IdxClassifier)
        {
            int NumberOfPlates = cGlobalInfo.CurrentScreening.ListPlatesActive.Count;

            for (int PlateIdx = 0; PlateIdx < NumberOfPlates; PlateIdx++)
            {
                cPlate CurrentPlateToProcess = cGlobalInfo.CurrentScreening.ListPlatesActive.GetPlate(cGlobalInfo.CurrentScreening.ListPlatesActive[PlateIdx].GetName());
                cInfoClass InfoClass = CurrentPlateToProcess.GetNumberOfClassesBut(NeutralClass);
                // Skip plates without at least two non-neutral classes: nothing to learn.
                if (InfoClass.NumberOfClass <= 1)
                {
                    richTextBoxInfoClassif.AppendText(CurrentPlateToProcess.GetName() + " not processed.\n");
                    continue;
                }

                weka.core.Instances insts = CurrentPlateToProcess.CreateInstancesWithClasses(InfoClass, NeutralClass);
                // NOTE(review): ClassificationModel stays null if IdxClassifier is outside 0-4,
                // which would throw NullReferenceException at buildClassifier below — confirm
                // callers always pass a valid index.
                Classifier ClassificationModel = null;
                string Text = "";
                switch (IdxClassifier)
                {
                    case 0: // J48
                        ClassificationModel = new weka.classifiers.trees.J48();
                        weka.classifiers.trees.J48 J48Model = (weka.classifiers.trees.J48)ClassificationModel;
                        // Minimum objects per leaf comes from the options window.
                        J48Model.setMinNumObj((int)cGlobalInfo.OptionsWindow.numericUpDownJ48MinNumObjects.Value);
                        Text = "J48 - ";
                        break;
                    case 1: // SVM
                        ClassificationModel = new weka.classifiers.functions.SMO();
                        Text = "SVM - ";
                        break;
                    case 2: // NN
                        ClassificationModel = new weka.classifiers.functions.MultilayerPerceptron();
                        Text = "Neural Network - ";
                        break;
                    case 3: // KNN
                        ClassificationModel = new weka.classifiers.lazy.IBk((int)cGlobalInfo.OptionsWindow.numericUpDownKofKNN.Value);
                        Text = "K-Nearest Neighbor(s) - ";
                        break;
                    case 4: // Random Forest
                        ClassificationModel = new weka.classifiers.trees.RandomForest();
                        Text = "Random Forest - ";

                        break;
                    default:
                        break;
                }
                richTextBoxInfoClassif.AppendText(Text + InfoClass.NumberOfClass + " classes - Plate: ");

                richTextBoxInfoClassif.AppendText(CurrentPlateToProcess.GetName() + " OK \n");
                // Train on a copy of the full plate dataset.
                weka.core.Instances train = new weka.core.Instances(insts, 0, insts.numInstances());

                ClassificationModel.buildClassifier(train);
                cGlobalInfo.ConsoleWriteLine(ClassificationModel.ToString());

                // 2-fold cross-validation, purely for the quality report below.
                weka.classifiers.Evaluation evaluation = new weka.classifiers.Evaluation(insts);
                evaluation.crossValidateModel(ClassificationModel, insts, 2, new java.util.Random(1));

                cGlobalInfo.ConsoleWriteLine(evaluation.toSummaryString());
                cGlobalInfo.ConsoleWriteLine(evaluation.toMatrixString());

                // update classification information of the current plate
                switch (IdxClassifier)
                {
                    case 0: // J48
                        // Keep only the body of the J48 graph description (drop up to "{").
                        weka.classifiers.trees.J48 CurrentClassifier = (weka.classifiers.trees.J48)(ClassificationModel);
                        CurrentPlateToProcess.GetInfoClassif().StringForTree = CurrentClassifier.graph().Remove(0, CurrentClassifier.graph().IndexOf("{") + 2);
                        break;
                    /*case 1: // SVM

                        break;
                    case 2: // NN

                        break;
                    case 3: // KNN

                        break;*/
                    default:
                        break;
                }

                CurrentPlateToProcess.GetInfoClassif().StringForQuality = evaluation.toSummaryString();
                CurrentPlateToProcess.GetInfoClassif().ConfusionMatrix = evaluation.toMatrixString();

                // Classify every active well of the plate with the per-plate model.
                foreach (cWell TmpWell in CurrentPlateToProcess.ListActiveWells)
                {
                    weka.core.Instance currentInst = TmpWell.CreateInstanceForNClasses(InfoClass).instance(0);
                    double predictedClass = ClassificationModel.classifyInstance(currentInst);

                    // Map the numeric prediction back to the original class id.
                    TmpWell.SetClass(InfoClass.ListBackAssociation[(int)predictedClass]);
                }
            }
            return;
        }
// Example #6
// 0
        /// <summary>
        /// Global classification: trains a single classifier over the whole screening,
        /// logs 2-fold cross-validation results, and assigns a predicted class (with
        /// its confidence) to every active well of every active plate.
        /// </summary>
        /// <param name="NeutralClass">Neutral class (excluded when counting/collecting classes)</param>
        /// <param name="IdxClassifier">Classifier Index (0:J48), (1:SVM), (2:NN), (3:KNN), (4:Random Forest)</param>
        private void ClassificationGlobal(int NeutralClass, int IdxClassifier)
        {
            cInfoClass InfoClass = cGlobalInfo.CurrentScreening.GetNumberOfClassesBut(NeutralClass);

            // Need at least two non-neutral classes to train anything meaningful.
            if (InfoClass.NumberOfClass <= 1)
            {
                richTextBoxInfoClassif.AppendText("Screening not processed.\n");
                return;
            }

            // NOTE(review): TrainingTable is computed but never read below — possibly kept
            // for a side effect of GetAverageDescriptorValues; confirm before removing.
            cExtendedTable TrainingTable = cGlobalInfo.CurrentScreening.ListPlatesActive.GetListActiveWells().GetAverageDescriptorValues(cGlobalInfo.CurrentScreening.ListDescriptors.GetActiveDescriptors(), false, true);

            weka.core.Instances insts = cGlobalInfo.CurrentScreening.CreateInstancesWithClasses(InfoClass, NeutralClass);
            Classifier ClassificationModel = null;

            switch (IdxClassifier)
            {
                case 0: // J48
                    ClassificationModel = new weka.classifiers.trees.J48();
                    weka.classifiers.trees.J48 J48Model = (weka.classifiers.trees.J48)ClassificationModel;
                    // Minimum objects per leaf comes from the options window.
                    J48Model.setMinNumObj((int)cGlobalInfo.OptionsWindow.numericUpDownJ48MinNumObjects.Value);
                    richTextBoxInfoClassif.AppendText("\nC4.5 : " + InfoClass.NumberOfClass + " classes");
                    break;
                case 1: // SVM
                    ClassificationModel = new weka.classifiers.functions.SMO();
                    break;
                case 2: // NN
                    ClassificationModel = new weka.classifiers.functions.MultilayerPerceptron();
                    break;
                case 3: // KNN
                    ClassificationModel = new weka.classifiers.lazy.IBk((int)cGlobalInfo.OptionsWindow.numericUpDownKofKNN.Value);
                    break;
                case 4: // Random Forest
                    ClassificationModel = new weka.classifiers.trees.RandomForest();
                    break;
                default:
                    break;
            }

            // ROBUSTNESS FIX: an unknown classifier index previously fell through with a
            // null model and crashed with NullReferenceException at buildClassifier.
            if (ClassificationModel == null)
            {
                richTextBoxInfoClassif.AppendText("Unknown classifier index: " + IdxClassifier + ". Screening not processed.\n");
                return;
            }

            // Train on a copy of the full screening dataset.
            weka.core.Instances train = new weka.core.Instances(insts, 0, insts.numInstances());

            ClassificationModel.buildClassifier(train);
            cGlobalInfo.ConsoleWriteLine(ClassificationModel.ToString());

            // 2-fold cross-validation, purely for the quality report below.
            weka.classifiers.Evaluation evaluation = new weka.classifiers.Evaluation(insts);
            evaluation.crossValidateModel(ClassificationModel, insts, 2, new java.util.Random(1));

            cGlobalInfo.ConsoleWriteLine(evaluation.toSummaryString());
            cGlobalInfo.ConsoleWriteLine(evaluation.toMatrixString());

            // Report which classifier was used (index 4 intentionally had no label here).
            string Text = "";
            switch (IdxClassifier)
            {
                case 0: // J48
                    Text = "J48 - ";
                    break;
                case 1: // SVM
                    Text = "SVM - ";
                    break;
                case 2: // NN
                    Text = "Neural Network - ";
                    break;
                case 3: // KNN
                    Text = "K-Nearest Neighbor(s) - ";
                    break;
                default:
                    break;
            }
            richTextBoxInfoClassif.AppendText(Text + InfoClass.NumberOfClass + " classes.");

            // Classify every active well of every active plate with the global model.
            foreach (cPlate CurrentPlateToProcess in cGlobalInfo.CurrentScreening.ListPlatesActive)
            {
                foreach (cWell TmpWell in CurrentPlateToProcess.ListActiveWells)
                {
                    weka.core.Instance currentInst = TmpWell.CreateInstanceForNClasses(InfoClass).instance(0);
                    double predictedClass = ClassificationModel.classifyInstance(currentInst);
                    double[] ClassConfidence = ClassificationModel.distributionForInstance(currentInst);
                    double ConfidenceValue = ClassConfidence[(int)predictedClass];
                    TmpWell.SetClass(InfoClass.ListBackAssociation[(int)predictedClass], ConfidenceValue);
                }
            }
        }
        /// <summary>
        /// Builds a single-instance Weka dataset from the supplied attribute values and
        /// classifies it with a loaded SMO model.
        /// NOTE(review): hiddelLayers, learningRate, momentum, decimalPlaces and
        /// trainingTime are never read — they look like leftovers from an MLP variant;
        /// kept (with defaults) for call-site compatibility.
        /// </summary>
        /// <param name="attributeArray">Names of the numeric attributes, in order.</param>
        /// <param name="classNames">Labels of the nominal class attribute.</param>
        /// <param name="dataValues">One value per attribute; each is rounded to 5 decimals.</param>
        /// <param name="classHeader">Name of the class attribute (placed last).</param>
        /// <param name="defaultclass">Placeholder class label assigned to the instance.</param>
        /// <param name="modelName">Model identifier/path used to locate the serialized SMO.</param>
        /// <returns>Single-element list containing the predicted class index.</returns>
        public List <double> testSMOUsingWeka(string[] attributeArray, string[] classNames, double[] dataValues, string classHeader, string defaultclass, string modelName, int hiddelLayers = 7, double learningRate = 0.03, double momentum = 0.4, int decimalPlaces = 2, int trainingTime = 1000)
        {
            // Nominal class attribute from the supplied labels.
            java.util.ArrayList classLabel = new java.util.ArrayList();
            foreach (string className in classNames)
            {
                classLabel.Add(className);
            }
            weka.core.Attribute classHeaderName = new weka.core.Attribute(classHeader, classLabel);

            // Numeric attributes, then the class attribute (class must come last).
            java.util.ArrayList attributeList = new java.util.ArrayList();
            foreach (string attribute in attributeArray)
            {
                attributeList.Add(new weka.core.Attribute(attribute));
            }
            attributeList.add(classHeaderName);

            weka.core.Instances data = new weka.core.Instances("TestInstances", attributeList, 0);
            data.setClassIndex(data.numAttributes() - 1);

            // Set instance's values for the attributes.
            weka.core.Instance inst_co = new DenseInstance(data.numAttributes());
            for (int i = 0; i < data.numAttributes() - 1; i++)
            {
                inst_co.setValue(i, Math.Round(dataValues.ElementAt(i), 5));
            }
            inst_co.setValue(classHeaderName, defaultclass);
            data.add(inst_co);

            // Load the SMO model: try the model store first, then fall back to
            // deserializing directly from the given path.
            weka.classifiers.functions.SMO clRead = new weka.classifiers.functions.SMO();
            try
            {
                java.io.File path = new java.io.File("/models/");
                clRead = loadSMOModel(modelName, path);
            }
            catch (Exception)
            {
                clRead = (weka.classifiers.functions.SMO)weka.core.SerializationHelper.read(modelName);
            }

            clRead.setBatchSize("100");
            clRead.setCalibrator(new weka.classifiers.functions.Logistic());
            clRead.setKernel(new weka.classifiers.functions.supportVector.PolyKernel());
            // NOTE(review): 1.02E-12 looks like a typo for the SMO default 1.0E-12 —
            // confirm before changing, since it affects numeric behavior.
            clRead.setEpsilon(1.02E-12);
            clRead.setC(1.0);
            clRead.setDebug(false);
            clRead.setChecksTurnedOff(false);
            clRead.setFilterType(new SelectedTag(weka.classifiers.functions.SMO.FILTER_NORMALIZE, weka.classifiers.functions.SMO.TAGS_FILTER));

            double classValue = clRead.classifyInstance(data.get(0));

            List <double> prediction = new List <double>();
            prediction.Add(classValue);
            return prediction;
        }
        /// <summary>
        /// Trains an SMO classifier from a CSV file (last column = class) on a
        /// percentSplit training prefix, saves the model, then reports accuracy
        /// measured over the entire dataset to the console.
        /// </summary>
        /// <param name="wekaFile">CSV file loaded via Weka's CSVLoader.</param>
        /// <param name="modelName">Name under which the trained model is saved.</param>
        public void trainSMOUsingWeka(string wekaFile, string modelName)
        {
            try
            {
                weka.core.converters.CSVLoader csvLoader = new weka.core.converters.CSVLoader();
                csvLoader.setSource(new java.io.File(wekaFile));
                weka.core.Instances insts = csvLoader.getDataSet();
                insts.setClassIndex(insts.numAttributes() - 1);

                cl = new weka.classifiers.functions.SMO();
                cl.setBatchSize("100");
                cl.setCalibrator(new weka.classifiers.functions.Logistic());
                cl.setKernel(new weka.classifiers.functions.supportVector.PolyKernel());
                // NOTE(review): 1.02E-12 looks like a typo for the SMO default 1.0E-12 — confirm.
                cl.setEpsilon(1.02E-12);
                cl.setC(1.0);
                cl.setDebug(false);
                cl.setChecksTurnedOff(false);
                cl.setFilterType(new SelectedTag(weka.classifiers.functions.SMO.FILTER_NORMALIZE, weka.classifiers.functions.SMO.TAGS_FILTER));

                System.Console.WriteLine("Performing " + percentSplit + "% split evaluation.");

                // Train on the leading percentSplit% of the (unshuffled) dataset.
                int trainSize             = insts.numInstances() * percentSplit / 100;
                weka.core.Instances train = new weka.core.Instances(insts, 0, trainSize);
                java.io.File        path  = new java.io.File("/models/");
                cl.buildClassifier(train);
                saveModel(cl, modelName, path);

                #region test whole set
                int numCorrect = 0;
                int total      = insts.numInstances();
                for (int i = 0; i < total; i++)
                {
                    weka.core.Instance currentInst = insts.instance(i);

                    // Debug capture of one instance's raw values (read elsewhere).
                    if (i == 12)
                    {
                        array = new List <float>();
                        foreach (float value in currentInst.toDoubleArray())
                        {
                            array.Add(value);
                        }
                    }

                    if (cl.classifyInstance(currentInst) == currentInst.classValue())
                    {
                        numCorrect++;
                    }
                }

                // BUG FIX: the loop scores the ENTIRE dataset, but the accuracy was
                // previously divided by the held-out test size, which could report >100%.
                System.Console.WriteLine(numCorrect + " out of " + total + " correct (" +
                                         (double)((double)numCorrect / (double)total * 100.0) + "%)");
                #endregion
            }
            catch (java.lang.Exception ex)
            {
                ex.printStackTrace();
            }
        }