Ejemplo n.º 1
0
        /// <summary>
        /// Converts raw gesture-capture data into the GestureTests format, computes
        /// stroke features, and returns the predicted gesture label as a string.
        /// When <paramref name="useRubine"/> is true the Rubine recognizer (EC) is
        /// used; otherwise the sample is written to "outfile.arff" and classified
        /// with the Weka classifier held in the `cls` field.
        /// </summary>
        /// <param name="useRubine">True to classify with the Rubine recognizer.</param>
        /// <param name="duration">Gesture duration passed through to the sample.</param>
        /// <param name="righthandedness">Handedness flag passed through to the sample.</param>
        /// <param name="SpeakerAngles">Speaker angles passed through to the sample.</param>
        /// <param name="pointHist">Interpreted point history.</param>
        /// <param name="S">Raw stylus points.</param>
        /// <param name="hist">Velocity history, hist[0]=x components, hist[1]=y components.</param>
        /// <param name="ihist">Inverse velocity history, same layout as hist.</param>
        /// <returns>Name of the predicted gesture class.</returns>
        public static string Classify(bool useRubine, float duration, bool righthandedness, List<float> SpeakerAngles, PointCollection pointHist, StylusPointCollection S, List<List<int>> hist, List<List<int>> ihist)
        {
            // Convert all parameters to the vector format used in GestureTests.
            List<Vector2> InterpretedPoints = new List<Vector2>();
            List<Vector2> StylusPoints = new List<Vector2>();
            List<Vector2> VelocityHistory = new List<Vector2>();
            List<Vector2> InverseVelocityHistory = new List<Vector2>();
            foreach (Point P in pointHist)
                InterpretedPoints.Add(new Vector2((float)P.X, (float)P.Y));
            foreach (StylusPoint P in S)
                StylusPoints.Add(new Vector2((float)P.X, (float)P.Y));
            for (int i = 0; i < hist[0].Count; i++)
            {
                VelocityHistory.Add(new Vector2(hist[0][i], hist[1][i]));
                InverseVelocityHistory.Add(new Vector2(ihist[0][i], ihist[1][i]));
            }

            // Create a new sample, compute the features, and classify.
            GS = new GestureSample(GestureTests.Types.GestureType.unknown, righthandedness,duration,SpeakerAngles,InterpretedPoints,StylusPoints,VelocityHistory,InverseVelocityHistory);
            GS.ComputeFeatures(GestureFeatures.PointsStroke);

            if (useRubine)
                return EC.Recognizer.Classify(GS).ToString();
            WriteARFF();

            // BUG FIX: the FileReader was created inline and never closed, leaking
            // the file handle on every call. Keep a reference and close it.
            java.io.FileReader reader = new java.io.FileReader("outfile.arff");
            Instances test = new Instances(reader);
            reader.close();
            test.setClassIndex(0);

            double clsLabel = cls.classifyInstance(test.instance(0));
            test.instance(0).setClassValue(clsLabel);

            // Map the numeric Weka label back to the gesture enum (labels are 0-based,
            // the enum is 1-based — hence the +1).
            return ((GestureType2D)((int)clsLabel + 1)).ToString();
        }
        /// <summary>
        /// Classifies a single observation with a previously trained multilayer
        /// perceptron loaded from /models/ and returns the predicted class value
        /// followed by the per-class probability distribution (in percent).
        /// </summary>
        /// <param name="attributeArray">Names of the numeric input attributes.</param>
        /// <param name="classNames">Labels of the nominal class attribute.</param>
        /// <param name="dataValues">One value per input attribute, same order as attributeArray.</param>
        /// <param name="classHeader">Name of the class attribute.</param>
        /// <param name="defaultclass">Placeholder class label assigned to the test instance.</param>
        /// <param name="modelName">Name of the saved model to load.</param>
        /// <param name="hiddelLayers">Hidden-layer spec re-applied to the loaded model.</param>
        /// <param name="learningRate">Learning rate re-applied to the loaded model.</param>
        /// <param name="momentum">Momentum re-applied to the loaded model.</param>
        /// <param name="decimalPlaces">Output precision re-applied to the loaded model.</param>
        /// <param name="trainingTime">Training epochs re-applied to the loaded model.</param>
        /// <returns>[predicted class value, probability of each class in %].</returns>
        public List <double> testMLPUsingWeka(string[] attributeArray, string[] classNames, double[] dataValues, string classHeader, string defaultclass, string modelName, int hiddelLayers = 7, double learningRate = 0.03, double momentum = 0.4, int decimalPlaces = 2, int trainingTime = 1000)
        {
            // Build the nominal class attribute from the supplied labels.
            java.util.ArrayList classLabel = new java.util.ArrayList();
            foreach (string className in classNames)
            {
                classLabel.Add(className);
            }
            weka.core.Attribute classHeaderName = new weka.core.Attribute(classHeader, classLabel);

            // Numeric input attributes, with the class attribute last.
            java.util.ArrayList attributeList = new java.util.ArrayList();
            foreach (string attribute in attributeArray)
            {
                attributeList.Add(new weka.core.Attribute(attribute));
            }
            attributeList.add(classHeaderName);
            weka.core.Instances data = new weka.core.Instances("TestInstances", attributeList, 0);
            data.setClassIndex(data.numAttributes() - 1);

            // Populate the single test instance.
            weka.core.Instance inst_co = new DenseInstance(data.numAttributes());
            for (int i = 0; i < data.numAttributes() - 1; i++)
            {
                inst_co.setValue(i, dataValues[i]);
            }
            inst_co.setValue(classHeaderName, defaultclass);
            data.add(inst_co);

            // Load the saved perceptron and re-apply its hyper-parameters.
            java.io.File path = new java.io.File("/models/");
            weka.classifiers.functions.MultilayerPerceptron clRead = loadModel(modelName, path);
            clRead.setHiddenLayers(hiddelLayers.ToString());
            clRead.setLearningRate(learningRate);
            clRead.setMomentum(momentum);
            clRead.setNumDecimalPlaces(decimalPlaces);
            clRead.setTrainingTime(trainingTime);

            weka.filters.Filter myRandom = new weka.filters.unsupervised.instance.Randomize();
            myRandom.setInputFormat(data);
            data = weka.filters.Filter.useFilter(data, myRandom);

            double classValue = clRead.classifyInstance(data.get(0));
            double[] predictionDistribution = clRead.distributionForInstance(data.get(0));

            // First element is the predicted class value, followed by the
            // class-membership probabilities scaled to percentages.
            // (The old code also built an unused label string per class — removed.)
            List<double> prediction = new List<double> { classValue };
            foreach (double classProbability in predictionDistribution)
            {
                prediction.Add(classProbability * 100);
            }
            return prediction;
        }
    /// <summary>
    /// Tests the classification result for each map a user played, using only the
    /// data available as if they were playing through in order: for each prefix of
    /// i maps, a neural network is trained and 3-fold cross-validated, and its
    /// accuracy is appended to a comma-separated results file for MATLAB.
    /// </summary>
    /// <param name="dataString">ARFF-formatted instance data from the server.</param>
    /// <param name="playerID">Player identifier, used in log and file names.</param>
    public static void classifyTest(String dataString, String playerID)
    {
        String results = "";
        try {
            java.io.StringReader stringReader = new java.io.StringReader(dataString);
            java.io.BufferedReader buffReader = new java.io.BufferedReader(stringReader);

            weka.core.Instances data = new weka.core.Instances(buffReader);
            // Set the class attribute if the data format does not provide it
            // (XRFF stores the class index; ARFF does not).
            if (data.classIndex() == -1)
                data.setClassIndex(data.numAttributes() - 1);

            weka.classifiers.Classifier cl;
            // Start at 3 instances so cross-validation has enough data per fold.
            for (int i = 3; i < data.numInstances(); i++) {
                // BUG FIX: the original constructed a NaiveBayes classifier and then
                // cast it to MultilayerPerceptron, which throws InvalidCastException
                // at runtime. The hidden-layer configuration and the "_NeuralNet"
                // output filename both indicate a neural net was intended.
                cl = new weka.classifiers.functions.MultilayerPerceptron();
                ((weka.classifiers.functions.MultilayerPerceptron)cl).setHiddenLayers("12");

                // Use only the first i maps, simulating a play-through.
                weka.core.Instances subset = new weka.core.Instances(data, 0, i);
                cl.buildClassifier(subset);

                weka.classifiers.Evaluation eval = new weka.classifiers.Evaluation(subset);
                eval.crossValidateModel(cl, subset, 3, new java.util.Random(1));
                results = results + eval.pctCorrect(); // For accuracy measurement
                if (i != data.numInstances()-1)
                    results = results + ", ";
                if (i == data.numInstances()-1)
                    Debug.Log("Player: " + playerID + ", Num Maps: " + data.numInstances() + ", AUC: " + eval.areaUnderROC(1));
            }
        } catch (java.lang.Exception ex)
        {
            Debug.LogError(ex.getMessage());
        }
        // Write values to file for a MATLAB read. Dispose the writer deterministically
        // so the file is flushed and closed even if WriteLine throws.
        using (StreamWriter writer = new StreamWriter("DataForMatlab/"+playerID+"_CrossFoldValidations_NeuralNet.txt"))
        {
            writer.WriteLine(results);
        }
        Debug.Log(playerID + " has been written to file");
    }
Ejemplo n.º 4
0
        /// <summary>
        /// Evaluates a classifier with a percentage split: trains on the first
        /// percentSplit% of the instances in the given ARFF file and tests on the
        /// remainder, comparing predicted vs. actual nominal class labels.
        /// </summary>
        /// <param name="classifierFileName">Path to the ARFF data file.</param>
        /// <param name="_classifier">Classifier to train and evaluate.</param>
        /// <returns>Accuracy on the held-out split, in percent (0.0 on error).</returns>
        public static double classifyTrain_Test(string classifierFileName, Classifier _classifier)
        {
            double performance = 0.0;

            try
            {
                FileReader javaFileReader = new FileReader(classifierFileName);
                weka.core.Instances insts = new weka.core.Instances(javaFileReader);
                javaFileReader.close();

                // The class attribute is the last column.
                insts.setClassIndex(insts.numAttributes() - 1);

                System.Console.WriteLine("Performing " + percentSplit + "% split evaluation.");

                int trainSize             = insts.numInstances() * percentSplit / 100;
                int testSize              = insts.numInstances() - trainSize;
                weka.core.Instances train = new weka.core.Instances(insts, 0, trainSize);

                _classifier.buildClassifier(train);

                // Score the held-out tail. (Unused locals from the original —
                // dataIndex, dist, abcd — have been removed.)
                int numCorrect        = 0;
                int numberOfInstances = insts.numInstances();
                for (int i = trainSize; i < numberOfInstances; i++)
                {
                    weka.core.Instance currentInst = insts.instance(i);

                    double predictClass = _classifier.classifyInstance(currentInst);

                    // Compare nominal labels rather than raw class indices.
                    string actualClass    = insts.classAttribute().value((int)currentInst.classValue());
                    string predictedClass = insts.classAttribute().value((int)predictClass);

                    if (predictedClass == actualClass)
                    {
                        numCorrect++;
                    }
                }

                // Guard against division by zero when percentSplit == 100.
                if (testSize > 0)
                {
                    performance = (double)numCorrect / testSize * 100;
                }

                // BUG FIX: `performance.toString()` (lowercase) does not compile in
                // C#; the method is ToString().
                System.Console.WriteLine(numCorrect + " out of " + testSize + " correct (" + performance.ToString() + "%)");
            }
            catch (java.lang.Exception ex)
            {
                ex.printStackTrace();
            }

            return(performance);
        }
Ejemplo n.º 5
0
        /// <summary>
        /// Loads the data set twice (once for structure, once as the "test" set) and
        /// prints an evaluation plus per-prediction distributions to the console.
        /// NOTE(review): every evaluate call inside the instance loop is commented
        /// out, so `evaluation` never records a prediction — the printing loop below
        /// iterates an empty list, and the J48 `model` is never trained or used.
        /// This method is effectively a work-in-progress stub; confirm intent before
        /// relying on its output.
        /// </summary>
        /// <param name="classifierFileName">Path to the ARFF data file (read twice).</param>
        /// <param name="predictionModel">Unused in the current state of the code.</param>
        public static void classifierTwo(string classifierFileName, string predictionModel)
        {
            FileReader javaFileReader = new FileReader(classifierFileName);

            weka.core.Instances wekaInsts = new weka.core.Instances(javaFileReader);
            javaFileReader.close();

            wekaInsts.setClassIndex(wekaInsts.numAttributes() - 1);



            //Classifier nbTree = (Classifier)SerializationHelper.read(Model) as J48;

            // Second read of the same file, used as the test set.
            Instances testDataSet = new Instances(new BufferedReader(new FileReader(classifierFileName)));

            testDataSet.setClassIndex(wekaInsts.numAttributes() - 1);
            //testDataSet.setClassIndex(10);
            Evaluation evaluation = new Evaluation(testDataSet);


            J48 model = new J48();

            //Classifier myClassifier = (Classifier)SerializationHelper.read(Model) as NaiveBayes;
            //Classifier myClassifier = new NaiveBayes();


            // NOTE(review): dead loop — the evaluation calls are commented out, so
            // this only walks the instances without side effects.
            for (int i = 0; i < testDataSet.numInstances(); i++)
            {
                Instance instance = testDataSet.instance(i);
                //evaluation.evaluateModelOnceAndRecordPrediction(myClassifier, instance);
                //evaluation.evaluateModelOnce(myClassifier, instance);
            }

            // Print the distribution and predicted value for each recorded prediction
            // (currently none are recorded; see the note in the summary).
            foreach (object o in evaluation.predictions().toArray())
            {
                NominalPrediction prediction = o as NominalPrediction;
                if (prediction != null)
                {
                    double[] distribution = prediction.distribution();
                    double   predicted    = prediction.predicted();

                    for (int i = 0; i < distribution.Length; i++)
                    {
                        System.Console.WriteLine(distribution[i]);
                    }

                    System.Console.WriteLine(predicted);
                }
            }

            System.Console.WriteLine(evaluation);
            System.Console.ReadKey();
        }
        /// <summary>
        /// Percentage-split evaluation of an SMO support vector machine: filters the
        /// data (nominal-to-binary, normalize, shuffle), trains on the first
        /// percentSplit% of instances, and scores the held-out remainder.
        /// </summary>
        /// <param name="insts">Data set; the last attribute is used as the class.</param>
        /// <returns>Accuracy on the held-out split in percent, or 0 on error.</returns>
        public static double SupportVectorMachineTest(weka.core.Instances insts)
        {
            try
            {
                // The class attribute is the final column.
                insts.setClassIndex(insts.numAttributes() - 1);

                SupportVectorMachine = new weka.classifiers.functions.SMO();

                // Pre-processing pipeline applied in order: nominal attributes to
                // binary dummies, per-instance normalization, then a shuffle.
                weka.filters.Filter[] pipeline =
                {
                    new weka.filters.unsupervised.attribute.NominalToBinary(),
                    new weka.filters.unsupervised.instance.Normalize(),
                    new weka.filters.unsupervised.instance.Randomize(),
                };
                foreach (weka.filters.Filter stage in pipeline)
                {
                    stage.setInputFormat(insts);
                    insts = weka.filters.Filter.useFilter(insts, stage);
                }

                // Train on the leading percentSplit% of the shuffled data.
                int total      = insts.numInstances();
                int trainCount = total * percentSplit / 100;
                int testCount  = total - trainCount;
                SupportVectorMachine.buildClassifier(new weka.core.Instances(insts, 0, trainCount));

                // Score the held-out tail.
                int hits = 0;
                for (int idx = trainCount; idx < total; idx++)
                {
                    weka.core.Instance sample = insts.instance(idx);
                    if (SupportVectorMachine.classifyInstance(sample) == sample.classValue())
                    {
                        hits++;
                    }
                }
                return (double)hits / (double)testCount * 100.0;
            }
            catch (java.lang.Exception ex)
            {
                ex.printStackTrace();
                return 0;
            }
        }
Ejemplo n.º 7
0
        /// <summary>
        /// Runs 10-fold cross-validation with an SMO classifier over the data set in
        /// <paramref name="classifierFileName"/>, writes one line per prediction
        /// (actual, predicted, revision, weight, margin, distribution) to
        /// "&lt;predictionModel&gt;NbCl.txt", and prints the prediction count.
        /// </summary>
        /// <param name="classifierFileName">Path to the ARFF data file.</param>
        /// <param name="predictionModel">Prefix for the output file name.</param>
        public static void classifierOne(string classifierFileName, string predictionModel)
        {
            FileReader javaFileReader = new FileReader(classifierFileName);

            weka.core.Instances wekaInsts = new weka.core.Instances(javaFileReader);
            javaFileReader.close();

            // The class attribute is the last column.
            wekaInsts.setClassIndex(wekaInsts.numAttributes() - 1);
            Classifier cl = new SMO();

            java.util.Random random     = new java.util.Random(1);
            Evaluation       evaluation = new Evaluation(wekaInsts);

            evaluation.crossValidateModel(cl, wekaInsts, 10, random);
            // (The original had an empty foreach over evaluation.getMetricsToDisplay()
            // here — dead code, removed.)

            int           count = 0;
            StringBuilder sb    = new StringBuilder();

            foreach (object o in evaluation.predictions().toArray())
            {
                NominalPrediction prediction = o as NominalPrediction;
                if (prediction != null)
                {
                    double[] distribution = prediction.distribution();
                    double   predicted    = prediction.predicted();
                    double   actual       = prediction.actual();
                    string   revision     = prediction.getRevision();
                    double   weight       = prediction.weight();
                    double   margine      = prediction.margin();

                    // NOTE(review): probabilities are concatenated with no separator,
                    // which is ambiguous to parse back — kept for output compatibility.
                    string distributions = String.Empty;
                    for (int i = 0; i < distribution.Length; i++)
                    {
                        distributions += distribution[i];
                    }
                    var predictionLine = String.Format("{0} - {1} - {2} - {3} - {4} - {5}\n", actual, predicted, revision, weight, margine, distributions);
                    sb.Append(predictionLine);
                }
                count++;
            }
            File_Helper.WriteToFile(sb, predictionModel + "NbCl.txt");
            System.Console.WriteLine(count);
            System.Console.ReadKey();
        }
 /// <summary>
 /// Loads a data set (CSV via CSVLoader, anything else via FileReader) into the
 /// `insts` field and builds one input control per non-class attribute inside
 /// flowLayoutPanel1: a ComboBox for nominal attributes, a TextBox for numeric.
 /// NOTE(review): the method is declared async but contains no await, so it
 /// completes synchronously; kept for signature compatibility with callers.
 /// </summary>
 /// <param name="location">Path to the data file (.csv or ARFF-compatible).</param>
 private async Task loadFileAndMakeElements(string location)
 {
     if (location.EndsWith(".csv"))
     {
         weka.core.converters.CSVLoader csvLoader = new weka.core.converters.CSVLoader();
         csvLoader.setSource(new java.io.File(location));
         insts = csvLoader.getDataSet();
     }
     else
     {
         insts = new weka.core.Instances(new java.io.FileReader(location));
     }
     // In both branches the last attribute is treated as the class.
     insts.setClassIndex(insts.numAttributes() - 1);

     flowLayoutPanel1.Controls.Clear();
     for (int i = 0; i < insts.numAttributes() - 1; i++)
     {
         // Nominal attributes with at least one value get a label + dropdown;
         // nominal attributes with no values get no control at all.
         if (insts.attribute(i).isNominal() && insts.attribute(i).numValues() > 0)
         {
             Label lbl = new Label();
             lbl.Text    = insts.attribute(i).name().Trim();
             lbl.Enabled = true;
             ComboBox cmbBox = new ComboBox();
             cmbBox.Name = insts.attribute(i).name();
             for (int m = 0; m < insts.attribute(i).numValues(); m++)
             {
                 cmbBox.Items.Add(insts.attribute(i).value(m));
             }
             cmbBox.DropDownStyle = ComboBoxStyle.DropDownList;
             cmbBox.Enabled       = true;
             flowLayoutPanel1.Controls.Add(lbl);
             flowLayoutPanel1.Controls.Add(cmbBox);
         }
         else if (insts.attribute(i).isNumeric())
         {
             // Numeric attributes get a label + free-text box with key filtering.
             Label lbl = new Label();
             lbl.Text = insts.attribute(i).name().Trim();
             TextBox txtBox = new TextBox();
             txtBox.Name      = insts.attribute(i).name();
             txtBox.KeyPress += new KeyPressEventHandler(txtBox_Keypress);
             flowLayoutPanel1.Controls.Add(lbl);
             flowLayoutPanel1.Controls.Add(txtBox);
         }
     }
 }
        /// <summary>
        /// Builds one instance from the user-entered control values (`list`),
        /// appends it to the training data's structure, applies the filter chain
        /// appropriate for the trained `model`, classifies it, and shows the
        /// predicted class label in a message box.
        /// </summary>
        private void button1_Click(object sender, EventArgs e)
        {
            // BUG FIX: keep a reference to the reader so it can be closed
            // (the original leaked the file handle).
            java.io.FileReader reader = new java.io.FileReader(file);
            weka.core.Instances insts = new weka.core.Instances(reader);
            reader.close();

            double[] Data = new double[insts.numAttributes()];
            for (int i = 0; i < list.Count; i++)
            {
                if (list[i].GetType() == typeof(TextBox))
                {
                    TextBox txt = (TextBox)list[i];
                    // BUG FIX: the old code replaced '.' with ',' and parsed with the
                    // current culture, which fails on cultures whose decimal separator
                    // is '.' (e.g. en-US). Normalize to '.' and parse with the
                    // invariant culture so both "3.14" and "3,14" are accepted.
                    string value = txt.Text.Replace(',', '.');
                    Data[i] = double.Parse(value, System.Globalization.CultureInfo.InvariantCulture);
                }
                else
                {
                    // ComboBox entries map to their nominal value index.
                    ComboBox combobox = (ComboBox)list[i];
                    Data[i] = Convert.ToDouble(combobox.SelectedIndex);
                }
            }
            insts.setClassIndex(insts.numAttributes() - 1);
            Instance newInsts = new Instance(1.0, Data);

            insts.add(newInsts);

            // Mirror the preprocessing that the loaded model was trained with.
            string type = model.GetType().ToString();
            if (type == "weka.classifiers.bayes.NaiveBayes")
            {
                weka.filters.Filter myDiscretize = new weka.filters.unsupervised.attribute.Discretize();
                myDiscretize.setInputFormat(insts);
                insts = weka.filters.Filter.useFilter(insts, myDiscretize);
            }
            else if (type == "weka.classifiers.lazy.IBk")
            {
                weka.filters.Filter myDummy = new weka.filters.unsupervised.attribute.NominalToBinary();
                myDummy.setInputFormat(insts);
                insts = weka.filters.Filter.useFilter(insts, myDummy);

                weka.filters.Filter myNormalize = new weka.filters.unsupervised.instance.Normalize();
                myNormalize.setInputFormat(insts);
                insts = weka.filters.Filter.useFilter(insts, myNormalize);
            }

            // Classify the instance we just appended and resolve its nominal label.
            double index = model.classifyInstance(insts.lastInstance());
            string result = insts.attribute(insts.numAttributes() - 1).value(Convert.ToInt16(index));

            MessageBox.Show(result);
        }
Ejemplo n.º 10
0
        /// <summary>
        /// Trains a multilayer perceptron for emotion recognition on the given ARFF
        /// file (training on the first percentSplit% of a shuffled copy) and saves
        /// the model under /models/&lt;modelName&gt;. Errors are printed, not thrown.
        /// </summary>
        /// <param name="wekaFile">Path to the ARFF training data.</param>
        /// <param name="modelName">Name under which the trained model is saved.</param>
        /// <param name="hiddelLayers">Hidden-layer spec for the perceptron.</param>
        /// <param name="learningRate">Perceptron learning rate.</param>
        /// <param name="momentum">Perceptron momentum.</param>
        /// <param name="decimalPlaces">Output precision.</param>
        /// <param name="trainingTime">Number of training epochs.</param>
        public void trainMachineForEmotionUsingWeka(string wekaFile, string modelName, int hiddelLayers = 7, double learningRate = 0.03, double momentum = 0.4, int decimalPlaces = 2, int trainingTime = 1000)
        {
            try
            {
                // BUG FIX: close the FileReader instead of leaking the handle.
                java.io.FileReader reader = new java.io.FileReader(wekaFile);
                weka.core.Instances insts = new weka.core.Instances(reader);
                reader.close();

                insts.setClassIndex(insts.numAttributes() - 1);

                weka.classifiers.functions.MultilayerPerceptron cl;
                cl = new weka.classifiers.functions.MultilayerPerceptron();
                cl.setHiddenLayers(hiddelLayers.ToString());
                cl.setLearningRate(learningRate);
                cl.setMomentum(momentum);
                cl.setNumDecimalPlaces(decimalPlaces);
                cl.setTrainingTime(trainingTime);

                System.Console.WriteLine("Performing " + percentSplit + "% split evaluation.");

                // Randomize the order of the instances in the dataset.
                weka.filters.Filter myRandom = new weka.filters.unsupervised.instance.Randomize();
                myRandom.setInputFormat(insts);
                insts = weka.filters.Filter.useFilter(insts, myRandom);

                // Train on the first percentSplit% of the shuffled data.
                // (The held-out size was computed but never used — removed.)
                int trainSize             = insts.numInstances() * percentSplit / 100;
                weka.core.Instances train = new weka.core.Instances(insts, 0, trainSize);
                java.io.File        path  = new java.io.File("/models/");
                cl.buildClassifier(train);
                saveModel(cl, modelName, path);
            }
            catch (java.lang.Exception ex)
            {
                ex.printStackTrace();
            }
        }
Ejemplo n.º 11
0
        /// <summary>
        /// Loads one ARFF file per base class ("{classifierFileName}_{k}.arff",
        /// k = 1..baseClasses) and sets each file's class index to its last column.
        /// NOTE(review): this looks unfinished — the loaded instances and the
        /// instance count are never used or returned, the classifier parameter is
        /// unused, and the trailing "//insts.re" suggests the jackknife split was
        /// never implemented. Confirm intent before calling.
        /// </summary>
        /// <param name="classifierFileName">Base name for the per-class ARFF files.</param>
        /// <param name="baseClasses">Number of per-class files to load (1-based suffix).</param>
        /// <param name="_classifie">Currently unused.</param>
        public static void JackKnife_Test_prepare(string classifierFileName, int baseClasses, Classifier _classifie)
        {
            for (int singleClass = 1; singleClass <= baseClasses; singleClass++)
            {
                // File naming convention: "<base>_<classIndex>.arff".
                string eachFileName = String.Format("{0}_{1}.arff", classifierFileName, singleClass);

                FileReader          javaFileReader = new FileReader(eachFileName);
                weka.core.Instances insts          = new weka.core.Instances(javaFileReader);
                javaFileReader.close();

                // The class attribute is the last column.
                insts.setClassIndex(insts.numAttributes() - 1);

                // NOTE(review): computed but unused.
                var totalnstances = insts.numInstances();

                //insts.re
            }
        }
        /// <summary>
        /// Percentage-split evaluation of a NaiveBayes classifier: discretizes the
        /// numeric attributes, shuffles the data, trains on the first percentSplit%
        /// of instances, and scores the held-out remainder.
        /// </summary>
        /// <param name="insts">Data set; the last attribute is used as the class.</param>
        /// <returns>Accuracy on the held-out split in percent, or 0 on error.</returns>
        public static double NaiveBayesTest(weka.core.Instances insts)
        {
            try
            {
                // The class attribute is the final column.
                insts.setClassIndex(insts.numAttributes() - 1);

                NaiveBayescl = new weka.classifiers.bayes.NaiveBayes();

                // Bin the numeric attributes before training.
                weka.filters.Filter discretizer = new weka.filters.unsupervised.attribute.Discretize();
                discretizer.setInputFormat(insts);
                insts = weka.filters.Filter.useFilter(insts, discretizer);

                // Shuffle so the split is not order-dependent.
                weka.filters.Filter shuffler = new weka.filters.unsupervised.instance.Randomize();
                shuffler.setInputFormat(insts);
                insts = weka.filters.Filter.useFilter(insts, shuffler);

                // Train on the leading percentSplit% of the shuffled data.
                int trainCount = insts.numInstances() * percentSplit / 100;
                int testCount  = insts.numInstances() - trainCount;
                NaiveBayescl.buildClassifier(new weka.core.Instances(insts, 0, trainCount));

                // Score the held-out tail.
                int correct = 0;
                for (int idx = trainCount; idx < insts.numInstances(); idx++)
                {
                    weka.core.Instance sample = insts.instance(idx);
                    if (NaiveBayescl.classifyInstance(sample) == sample.classValue())
                    {
                        correct++;
                    }
                }
                return (double)correct / (double)testCount * 100.0;
            }
            catch (java.lang.Exception ex)
            {
                ex.printStackTrace();
                return 0;
            }
        }
Ejemplo n.º 13
0
        /// <summary>
        /// Trains a Bagging ensemble on the given ARFF file (training on the first
        /// percentSplit% of a shuffled copy) and saves the model under
        /// /models/&lt;modelName&gt;.
        /// </summary>
        /// <param name="wekaFile">Path to the ARFF training data.</param>
        /// <param name="modelName">Name under which the trained model is saved.</param>
        public void trainMachineForHybridUsingWeka(string wekaFile, string modelName)
        {
            // BUG FIX: close the FileReader instead of leaking the handle.
            java.io.FileReader reader = new java.io.FileReader(wekaFile);
            weka.core.Instances insts = new weka.core.Instances(reader);
            reader.close();

            insts.setClassIndex(insts.numAttributes() - 1);
            weka.classifiers.Classifier bagging = new weka.classifiers.meta.Bagging();

            System.Console.WriteLine("Performing " + percentSplit + "% split evaluation.");

            // Randomize the order of the instances in the dataset.
            weka.filters.Filter myRandom = new weka.filters.unsupervised.instance.Randomize();
            myRandom.setInputFormat(insts);
            insts = weka.filters.Filter.useFilter(insts, myRandom);

            // Train on the first percentSplit% of the shuffled data.
            // (The held-out size was computed but never used — removed.)
            int trainSize = insts.numInstances() * percentSplit / 100;

            weka.core.Instances train = new weka.core.Instances(insts, 0, trainSize);
            java.io.File        path  = new java.io.File("/models/");
            bagging.buildClassifier(train);
            saveModel(bagging, modelName, path);
        }
        /// <summary>
        /// Mines association rules (Apriori) over the data set at
        /// <paramref name="dataPath"/> and returns the drug brands implied by the
        /// first rule whose antecedent matches <paramref name="drugId"/>.
        /// </summary>
        /// <param name="drugId">Drug whose associated brands are requested.</param>
        /// <param name="dataPath">Path to the ARFF data file.</param>
        /// <returns>Brands from rules where both sides are flagged present ("=1").</returns>
        public List <DrugBrandInfo> AssociateDrugs(int? drugId, string dataPath)
        {
            List <DrugBrandInfo> drugList = new List <DrugBrandInfo>();

            // BUG FIX: close the FileReader instead of leaking the handle.
            java.io.FileReader reader = new java.io.FileReader(dataPath);
            weka.core.Instances data = new weka.core.Instances(reader);
            reader.close();

            data.setClassIndex(data.numAttributes() - 1);

            Apriori apriori = new Apriori();

            apriori.setClassIndex(data.classIndex());
            apriori.buildAssociations(data);

            // vector[0] holds the rule antecedents, vector[1] the consequents.
            FastVector[] vector = apriori.getAllTheRules();

            for (int i = 0; i < vector[0].size(); i++)
            {
                string value1 = ((AprioriItemSet)vector[0].elementAt(i)).toString(data);
                string value2 = ((AprioriItemSet)vector[1].elementAt(i)).toString(data);

                // Item sets render as "<attribute>=<value>"; split into id and flag.
                string[] set1 = value1.Split(' ', '=');
                string[] set2 = value2.Split(' ', '=');

                if (set1[0].Equals(drugId.ToString()))
                {
                    // Both sides must be "present" (=1) for a positive association.
                    if (set1[1] == "1" && set2[1] == "1")
                    {
                        int brandId = Convert.ToInt32(set2[0]);
                        var drug    = db.DrugBrandInfos.SingleOrDefault(c => c.Id == brandId);
                        drugList.Add(drug);
                    }
                    // Only the first matching rule is considered.
                    break;
                }
            }
            return(drugList);
        }
Ejemplo n.º 15
0
        /// <summary>
        /// Classifies a single observation with a previously saved Bagging model
        /// loaded from /models/ and returns the predicted class label.
        /// </summary>
        /// <param name="attributeArray">Names of the numeric input attributes.</param>
        /// <param name="classNames">Labels of the nominal class attribute.</param>
        /// <param name="dataValues">One value per input attribute, same order as attributeArray.</param>
        /// <param name="classHeader">Name of the class attribute.</param>
        /// <param name="defaultclass">Placeholder class label for the test instance.</param>
        /// <param name="modelName">Name of the saved Bagging model to load.</param>
        /// <returns>The predicted class label.</returns>
        public string testHybridEmotionUsingWeka(string[] attributeArray, string[] classNames, double[] dataValues, string classHeader, string defaultclass, string modelName)
        {
            // Build the nominal class attribute from the supplied labels.
            java.util.ArrayList classLabel = new java.util.ArrayList();
            foreach (string className in classNames)
            {
                classLabel.Add(className);
            }
            weka.core.Attribute classHeaderName = new weka.core.Attribute(classHeader, classLabel);

            // Numeric input attributes, with the class attribute last.
            java.util.ArrayList attributeList = new java.util.ArrayList();
            foreach (string attribute in attributeArray)
            {
                attributeList.Add(new weka.core.Attribute(attribute));
            }
            attributeList.add(classHeaderName);
            weka.core.Instances data = new weka.core.Instances("TestInstances", attributeList, 0);
            data.setClassIndex(data.numAttributes() - 1);

            // Populate the single test instance.
            weka.core.Instance inst_co = new DenseInstance(data.numAttributes());
            for (int i = 0; i < data.numAttributes() - 1; i++)
            {
                inst_co.setValue(i, dataValues[i]);
            }
            inst_co.setValue(classHeaderName, defaultclass);
            data.add(inst_co);

            java.io.File path = new java.io.File("/models/");
            weka.classifiers.meta.Bagging clRead   = loadBaggingModel(modelName, path);
            weka.filters.Filter           myRandom = new weka.filters.unsupervised.instance.Randomize();
            myRandom.setInputFormat(data);
            data = weka.filters.Filter.useFilter(data, myRandom);

            // classifyInstance returns the nominal index as a whole double; cast it
            // directly instead of round-tripping through a string — the old
            // Int32.Parse(classValue.ToString()) breaks on any non-integer
            // (e.g. culture-dependent) formatting of the double.
            double classValue = clRead.classifyInstance(data.get(0));
            return classLabel.get((int)classValue).ToString();
        }
    /// <summary>
    /// Called when the player logs in: parses the server-supplied ARFF text into
    /// the `playerData` field, defaults the class attribute to the last column if
    /// the format did not specify one, and trains a RandomForest into the
    /// `classifier` field. Errors are logged, not thrown.
    /// </summary>
    /// <param name="dataString">ARFF-formatted instance data from the server.</param>
    public void InitializeClassifier(String dataString)
    {
        try
        {
            // Wrap the in-memory string so Weka can consume it as a reader.
            java.io.BufferedReader reader =
                new java.io.BufferedReader(new java.io.StringReader(dataString));
            playerData = new weka.core.Instances(reader);

            // If the format (e.g. ARFF, unlike XRFF) does not carry the class
            // attribute, use the last column.
            if (playerData.classIndex() == -1)
                playerData.setClassIndex(playerData.numAttributes() - 1);

            // Random forest is the classifier in use; NaiveBayes, a 12-hidden-unit
            // MultilayerPerceptron, J48, and IB1 were tried previously and can be
            // swapped in the same way.
            classifier = new weka.classifiers.trees.RandomForest();

            classifier.buildClassifier(playerData);
            Debug.Log("Initialized Classifier");
        }
        catch (java.lang.Exception ex)
        {
            Debug.LogError(ex.getMessage());
        }
    }
Ejemplo n.º 17
0
        /// <summary>
        /// Creates a Weka instances structure with classes for supervised methods:
        /// one instance per active well that has an assigned class, with the
        /// plate-based descriptors as features and the well's class index as the
        /// nominal class value ("Class0".."Class{N-1}").
        /// </summary>
        /// <param name="NumberOfClass">Number of nominal class values to declare.</param>
        /// <returns>Instances ready for supervised learning (class = last attribute).</returns>
        public Instances CreateInstancesWithClassesWithPlateBasedDescriptor(int NumberOfClass)
        {
            // One numeric attribute per plate-based descriptor.
            weka.core.FastVector atts = new FastVector();

            int columnNo = 0;

            for (int i = 0; i < ParentScreening.ListPlateBaseddescriptorNames.Count; i++)
            {
                atts.addElement(new weka.core.Attribute(ParentScreening.ListPlateBaseddescriptorNames[i]));
                columnNo++;
            }

            // Nominal class attribute appended last; columnNo is now its index.
            weka.core.FastVector attVals = new FastVector();

            for (int i = 0; i < NumberOfClass; i++)
                attVals.addElement("Class" + (i).ToString());

            atts.addElement(new weka.core.Attribute("Class", attVals));

            Instances data1 = new Instances("MyRelation", atts, 0);
            foreach (cWell CurrentWell in this.ListActiveWells)
            {
                // Skip wells without an assigned class.
                if (CurrentWell.GetCurrentClassIdx() == -1) continue;
                double[] vals = new double[data1.numAttributes()];
                // (Unused IdxWell/IdxCol counters from the original removed; the
                // loop variable already is the descriptor column index.)
                for (int Col = 0; Col < ParentScreening.ListPlateBaseddescriptorNames.Count; Col++)
                {
                    vals[Col] = CurrentWell.ListPlateBasedDescriptors[Col].GetValue();
                }
                vals[columnNo] = CurrentWell.GetCurrentClassIdx();
                data1.add(new DenseInstance(1.0, vals));
            }
            data1.setClassIndex((data1.numAttributes() - 1));

            return data1;
        }
Ejemplo n.º 18
0
        /// <summary>
        /// Runs a percent-split evaluation of a Bayesian network classifier on the
        /// Communication ARFF dataset and writes the Weka-style report (summary,
        /// per-class statistics, confusion matrix) to Communication_Report.txt.
        /// Relies on the class-level <c>percentSplit</c> field.
        /// </summary>
        public static void Test()
        {
            // Load the dataset; the class attribute is assumed to be the last column.
            weka.core.Instances data = new weka.core.Instances(new java.io.FileReader("./data/Classification/Communication.arff"));
            data.setClassIndex(data.numAttributes() - 1);

            weka.classifiers.Classifier cls = new weka.classifiers.bayes.BayesNet();


            //Save BayesNet results in .txt file
            using (System.IO.StreamWriter file = new System.IO.StreamWriter("./data/Classification/Communication_Report.txt"))
            {
                file.WriteLine("Performing " + percentSplit + "% split evaluation.");

                int runs = 1;

                // NOTE(review): despite the label below this is a randomized percent
                // split repeated 'runs' times, not k-fold cross-validation.
                // perform cross-validation
                for (int i = 0; i < runs; i++)
                {
                    // randomize data (deterministic seed so each run is reproducible)
                    int seed = i + 1;
                    java.util.Random    rand     = new java.util.Random(seed);
                    weka.core.Instances randData = new weka.core.Instances(data);
                    randData.randomize(rand);

                    //weka.classifiers.Evaluation eval = new weka.classifiers.Evaluation(randData);

                    // Split sizes; train/test start as empty copies sharing the header.
                    int trainSize             = (int)Math.Round((double)data.numInstances() * percentSplit / 100);
                    int testSize              = data.numInstances() - trainSize;
                    weka.core.Instances train = new weka.core.Instances(data, 0, 0);
                    weka.core.Instances test  = new weka.core.Instances(data, 0, 0);
                    train.setClassIndex(train.numAttributes() - 1);
                    test.setClassIndex(test.numAttributes() - 1);

                    //Print classifier analytics for all the dataset
                    file.WriteLine("EVALUATION OF TEST DATASET.");

                    // Distribute the shuffled instances: first trainSize rows to train,
                    // the remainder to test.
                    //int numCorrect = 0;
                    for (int j = 0; j < data.numInstances(); j++)
                    {
                        weka.core.Instance currentInst = randData.instance(j);

                        if (j < trainSize)
                        {
                            train.add(currentInst);
                        }

                        else
                        {
                            test.add(currentInst);

                            /*
                             * double predictedClass = cls.classifyInstance(currentInst);
                             *
                             * double[] prediction = cls.distributionForInstance(currentInst);
                             *
                             * for (int p = 0; p < prediction.Length; p++)
                             * {
                             *  file.WriteLine("Probability of class [{0}] for [{1}] is: {2}", currentInst.classAttribute().value(p), currentInst, Math.Round(prediction[p], 4));
                             * }
                             * file.WriteLine();
                             *
                             * file.WriteLine();
                             * if (predictedClass == data.instance(j).classValue())
                             *  numCorrect++;*/
                        }
                    }

                    // build and evaluate classifier
                    cls.buildClassifier(train);

                    // Test the model
                    weka.classifiers.Evaluation eval = new weka.classifiers.Evaluation(randData);
                    eval.evaluateModel(cls, test);

                    // Print the results as in Weka explorer:
                    //Print statistics
                    String strSummaryTest = eval.toSummaryString();

                    file.WriteLine(strSummaryTest);
                    file.WriteLine();

                    //Print detailed class statistics
                    file.WriteLine(eval.toClassDetailsString());
                    file.WriteLine();

                    //Print confusion matrix
                    file.WriteLine(eval.toMatrixString());
                    file.WriteLine();

                    // Get the confusion matrix (NOTE(review): currently unused)
                    double[][] cmMatrixTest = eval.confusionMatrix();

                    System.Console.WriteLine("Bayesian Network results saved in Communication_Report.txt file successfully.");
                }
            }
        }
Ejemplo n.º 19
0
        /// <summary>
        /// Percent-split evaluation of a BayesNet on iris.arff: trains on the first
        /// percentSplit% of the shuffled data, prints per-instance class distributions
        /// for the remainder, then prints full Weka evaluation reports for both the
        /// training and test partitions. Relies on the class-level <c>percentSplit</c>.
        /// </summary>
        public static void BayesTest()
        {
            try
            {
                // Load iris data; the class attribute is the last column.
                weka.core.Instances insts = new weka.core.Instances(new java.io.FileReader("iris.arff"));
                insts.setClassIndex(insts.numAttributes() - 1);

                weka.classifiers.Classifier cl = new weka.classifiers.bayes.BayesNet();
                System.Console.WriteLine("Performing " + percentSplit + "% split evaluation.");

                //randomize the order of the instances in the dataset.
                weka.filters.Filter myRandom = new weka.filters.unsupervised.instance.Randomize();
                myRandom.setInputFormat(insts);
                insts = weka.filters.Filter.useFilter(insts, myRandom);

                // Train on the first trainSize instances; 'test' starts empty and is
                // filled with the held-out instances inside the loop below.
                int trainSize             = insts.numInstances() * percentSplit / 100;
                int testSize              = insts.numInstances() - trainSize;
                weka.core.Instances train = new weka.core.Instances(insts, 0, trainSize);
                weka.core.Instances test  = new weka.core.Instances(insts, 0, 0);


                cl.buildClassifier(train);
                //print model
                System.Console.WriteLine(cl);

                // Score each held-out instance and echo its predicted distribution.
                int numCorrect = 0;
                for (int i = trainSize; i < insts.numInstances(); i++)
                {
                    weka.core.Instance currentInst    = insts.instance(i);
                    double             predictedClass = cl.classifyInstance(currentInst);
                    test.add(currentInst);

                    double[] prediction = cl.distributionForInstance(currentInst);

                    for (int x = 0; x < prediction.Length; x++)
                    {
                        System.Console.WriteLine("Probability of class [{0}] for [{1}] is: {2}", currentInst.classAttribute().value(x), currentInst, Math.Round(prediction[x], 4));
                    }
                    System.Console.WriteLine();

                    if (predictedClass == insts.instance(i).classValue())
                    {
                        numCorrect++;
                    }
                }
                System.Console.WriteLine(numCorrect + " out of " + testSize + " correct (" +
                                         (double)((double)numCorrect / (double)testSize * 100.0) + "%)");

                // Evaluate on the training partition (resubstitution estimate).
                // NOTE(review): the "Train the model" label below is misleading —
                // the model was already built above; this only evaluates it.
                // Train the model
                weka.classifiers.Evaluation eTrain = new weka.classifiers.Evaluation(train);
                eTrain.evaluateModel(cl, train);

                // Print the results as in Weka explorer:
                //Print statistics
                String strSummaryTrain = eTrain.toSummaryString();
                System.Console.WriteLine(strSummaryTrain);

                //Print detailed class statistics
                System.Console.WriteLine(eTrain.toClassDetailsString());

                //Print confusion matrix
                System.Console.WriteLine(eTrain.toMatrixString());

                // Get the confusion matrix (NOTE(review): currently unused)
                double[][] cmMatrixTrain = eTrain.confusionMatrix();


                // Test the model
                weka.classifiers.Evaluation eTest = new weka.classifiers.Evaluation(test);
                eTest.evaluateModel(cl, test);

                // Print the results as in Weka explorer:
                //Print statistics
                String strSummaryTest = eTest.toSummaryString();
                System.Console.WriteLine(strSummaryTest);

                //Print detailed class statistics
                System.Console.WriteLine(eTest.toClassDetailsString());

                //Print confusion matrix
                System.Console.WriteLine(eTest.toMatrixString());

                // Get the confusion matrix (NOTE(review): currently unused)
                double[][] cmMatrixTest = eTest.confusionMatrix();
            }

            catch (java.lang.Exception ex)
            {
                ex.printStackTrace();
            }
        }
Ejemplo n.º 20
0
        /// <summary>
        /// Runs randomized 10-fold evaluation of a NaiveBayes classifier on the
        /// Communication ARFF dataset, writing per-fold and whole-dataset reports to
        /// Communication_Report.txt.
        /// NOTE(review): the classifier is re-trained in place each fold (makeCopy is
        /// commented out) and the outer pooled 'eval' object is never reported, so
        /// this is not a standard pooled cross-validation.
        /// </summary>
        public static void cvdTest()
        {
            // Load the dataset; the class attribute is the last column.
            weka.core.Instances data = new weka.core.Instances(new java.io.FileReader("./data/Classification/Communication.arff"));
            data.setClassIndex(data.numAttributes() - 1);

            weka.classifiers.Classifier cls = new weka.classifiers.bayes.NaiveBayes();

            //Save BayesNet results in .txt file
            using (System.IO.StreamWriter file = new System.IO.StreamWriter("./data/Classification/Communication_Report.txt"))
            {
                int runs  = 1;
                int folds = 10;

                // perform cross-validation
                for (int i = 0; i < runs; i++)
                {
                    // randomize data (seeded per run) and stratify for nominal classes
                    int seed = i + 1;
                    java.util.Random    rand     = new java.util.Random(seed);
                    weka.core.Instances randData = new weka.core.Instances(data);
                    randData.randomize(rand);
                    if (randData.classAttribute().isNominal())
                    {
                        randData.stratify(folds);
                    }

                    // NOTE(review): this pooled Evaluation is never filled or printed.
                    weka.classifiers.Evaluation eval = new weka.classifiers.Evaluation(randData);
                    for (int n = 0; n < folds; n++)
                    {
                        weka.core.Instances train = randData.trainCV(folds, n);
                        weka.core.Instances test  = randData.testCV(folds, n);
                        // build and evaluate classifier
                        //weka.classifiers.Classifier clsCopy = weka.classifiers.Classifier.makeCopy(cls);
                        cls.buildClassifier(train);
                        //eval.evaluateModel(cls, test);

                        //Print classifier analytics for all the dataset
                        file.WriteLine("EVALUATION OF TEST DATASET.");
                        // Test the model
                        weka.classifiers.Evaluation eTest = new weka.classifiers.Evaluation(test);
                        eTest.evaluateModel(cls, test);

                        // Print the results as in Weka explorer:
                        //Print statistics
                        String strSummaryTest = eTest.toSummaryString();

                        file.WriteLine(strSummaryTest);
                        file.WriteLine();

                        //Print detailed class statistics
                        file.WriteLine(eTest.toClassDetailsString());
                        file.WriteLine();

                        //Print confusion matrix
                        file.WriteLine(eTest.toMatrixString());
                        file.WriteLine();

                        // Get the confusion matrix (NOTE(review): currently unused)
                        double[][] cmMatrixTest = eTest.confusionMatrix();

                        System.Console.WriteLine("Bayesian Network results saved in Communication_Report.txt file successfully.");
                    }

                    //Print classifier analytics for all the dataset
                    file.WriteLine("EVALUATION OF ALL DATASET.");

                    // Re-train on the full dataset and report the resubstitution estimate.
                    cls.buildClassifier(data);

                    // Train the model
                    weka.classifiers.Evaluation eAlldata = new weka.classifiers.Evaluation(data);
                    eAlldata.evaluateModel(cls, data);

                    // Print the results as in Weka explorer:
                    //Print statistics
                    String strSummaryAlldata = eAlldata.toSummaryString();
                    file.WriteLine(strSummaryAlldata);
                    file.WriteLine();

                    //Print detailed class statistics
                    file.WriteLine(eAlldata.toClassDetailsString());
                    file.WriteLine();

                    //Print confusion matrix
                    file.WriteLine(eAlldata.toMatrixString());
                    file.WriteLine("----------------");

                    //print model
                    file.WriteLine(cls);
                    file.WriteLine();
                }
            }
        }
        /// <summary>
        /// Classifies a single feature vector with a saved SMO model.
        /// Builds a one-instance Weka dataset from the supplied attribute names and
        /// values, loads the model named <paramref name="modelName"/> (falling back to
        /// Weka's SerializationHelper if the primary loader throws) and returns the
        /// predicted class index.
        /// NOTE(review): hiddelLayers/learningRate/momentum/decimalPlaces/trainingTime
        /// are unused here; they are kept for signature compatibility with the
        /// sibling test*UsingWeka methods.
        /// </summary>
        /// <returns>Single-element list containing the predicted class index.</returns>
        public List <double> testSMOUsingWeka(string[] attributeArray, string[] classNames, double[] dataValues, string classHeader, string defaultclass, string modelName, int hiddelLayers = 7, double learningRate = 0.03, double momentum = 0.4, int decimalPlaces = 2, int trainingTime = 1000)
        {
            // Nominal class attribute built from the provided class names.
            java.util.ArrayList classLabel = new java.util.ArrayList();
            foreach (string className in classNames)
            {
                classLabel.Add(className);
            }
            weka.core.Attribute classHeaderName = new weka.core.Attribute(classHeader, classLabel);

            // Numeric attributes, followed by the class attribute in the last position.
            java.util.ArrayList attributeList = new java.util.ArrayList();
            foreach (string attribute in attributeArray)
            {
                weka.core.Attribute newAttribute = new weka.core.Attribute(attribute);
                attributeList.Add(newAttribute);
            }
            attributeList.add(classHeaderName);
            weka.core.Instances data = new weka.core.Instances("TestInstances", attributeList, 0);
            data.setClassIndex(data.numAttributes() - 1);

            // Build the single instance to classify; values are rounded to 5 decimal
            // places as in the original implementation.
            weka.core.Instance inst_co = new DenseInstance(data.numAttributes());
            for (int i = 0; i < data.numAttributes() - 1; i++)
            {
                inst_co.setValue(i, Math.Round(dataValues.ElementAt(i), 5));
            }
            inst_co.setValue(classHeaderName, defaultclass);
            data.add(inst_co);

            // Load the trained SMO model; fall back to SerializationHelper on failure.
            weka.classifiers.functions.SMO clRead = new weka.classifiers.functions.SMO();
            try
            {
                java.io.File path = new java.io.File("/models/");
                clRead = loadSMOModel(modelName, path);
            }
            catch (Exception)
            {
                clRead = (weka.classifiers.functions.SMO)weka.core.SerializationHelper.read(modelName);
            }

            // NOTE(review): these hyper-parameters do not affect an already-trained
            // model; they are kept to mirror the training-time configuration.
            clRead.setBatchSize("100");
            clRead.setCalibrator(new weka.classifiers.functions.Logistic());
            clRead.setKernel(new weka.classifiers.functions.supportVector.PolyKernel());
            clRead.setEpsilon(1.02E-12);
            clRead.setC(1.0);
            clRead.setDebug(false);
            clRead.setChecksTurnedOff(false);
            clRead.setFilterType(new SelectedTag(weka.classifiers.functions.SMO.FILTER_NORMALIZE, weka.classifiers.functions.SMO.TAGS_FILTER));

            double classValue = clRead.classifyInstance(data.get(0));

            List <double> prediction = new List <double>();
            prediction.Add(classValue);
            return(prediction);
        }
        /// <summary>
        /// Trains an SMO classifier on a CSV dataset (last column = class), saves the
        /// model to /models/, then reports a resubstitution-style accuracy over the
        /// whole dataset. Relies on the class-level <c>cl</c>, <c>array</c> and
        /// <c>percentSplit</c> members.
        /// </summary>
        /// <param name="wekaFile">path of the CSV training file</param>
        /// <param name="modelName">name under which the trained model is saved</param>
        public void trainSMOUsingWeka(string wekaFile, string modelName)
        {
            try
            {
                // Load the CSV training data; the class attribute is the last column.
                weka.core.converters.CSVLoader csvLoader = new weka.core.converters.CSVLoader();
                csvLoader.setSource(new java.io.File(wekaFile));
                weka.core.Instances insts = csvLoader.getDataSet();
                insts.setClassIndex(insts.numAttributes() - 1);

                // Configure the SMO classifier (stored in the class-level 'cl' field).
                cl = new weka.classifiers.functions.SMO();
                cl.setBatchSize("100");
                cl.setCalibrator(new weka.classifiers.functions.Logistic());
                cl.setKernel(new weka.classifiers.functions.supportVector.PolyKernel());
                cl.setEpsilon(1.02E-12);
                cl.setC(1.0);
                cl.setDebug(false);
                cl.setChecksTurnedOff(false);
                cl.setFilterType(new SelectedTag(weka.classifiers.functions.SMO.FILTER_NORMALIZE, weka.classifiers.functions.SMO.TAGS_FILTER));

                System.Console.WriteLine("Performing " + percentSplit + "% split evaluation.");

                // Train on the first percentSplit% of the data (no shuffling), then
                // persist the model.
                int trainSize             = insts.numInstances() * percentSplit / 100;
                weka.core.Instances train = new weka.core.Instances(insts, 0, trainSize);
                java.io.File        path  = new java.io.File("/models/");
                cl.buildClassifier(train);
                saveModel(cl, modelName, path);

                #region test whole set
                int numCorrect = 0;
                int total      = insts.numInstances();
                for (int i = 0; i < total; i++)
                {
                    weka.core.Instance currentInst = insts.instance(i);
                    // Snapshot instance 12's raw values for later comparison/debugging.
                    if (i == 12)
                    {
                        array = new List <float>();
                        foreach (float value in currentInst.toDoubleArray())
                        {
                            array.Add(value);
                        }
                    }

                    double predictedClass = cl.classifyInstance(currentInst);
                    if (predictedClass == insts.instance(i).classValue())
                    {
                        numCorrect++;
                    }
                }

                // BUGFIX: the loop above scores EVERY instance, so report accuracy
                // over the whole dataset; the original divided by the held-out test
                // size, which could report rates above 100%.
                System.Console.WriteLine(numCorrect + " out of " + total + " correct (" +
                                         (double)((double)numCorrect / (double)total * 100.0) + "%)");
                #endregion
            }
            catch (java.lang.Exception ex)
            {
                ex.printStackTrace();
            }
        }
    // Test the classification result of each map that a user played,
    // with the data available as if they were playing through it
    public static void classifyTest(String dataString, String playerID)
    {
        try
        {
            // Read the server-provided ARFF text into a Weka dataset.
            java.io.BufferedReader reader =
                new java.io.BufferedReader(new java.io.StringReader(dataString));

            /* NOTE THAT FOR NAIVE BAYES ALL WEIGHTS CAN BE = 1*/
            weka.core.Instances thisData = new weka.core.Instances(reader);
            if (thisData.classIndex() == -1)
                thisData.setClassIndex(thisData.numAttributes() - 1);

            // Empty copy sharing the same header; collects this player's unique rows.
            weka.core.Instances thisUniqueData = new weka.core.Instances(thisData);
            if (thisUniqueData.classIndex() == -1)
                thisUniqueData.setClassIndex(thisUniqueData.numAttributes() - 1);
            thisUniqueData.delete();

            // Lazily create the global unique-instance accumulator with the same header.
            if (allUniqueData == null)
            {
                allUniqueData = new weka.core.Instances(thisData);
                if (allUniqueData.classIndex() == -1)
                    allUniqueData.setClassIndex(allUniqueData.numAttributes() - 1);
                allUniqueData.delete();
            }

            weka.core.InstanceComparator comparator = new weka.core.InstanceComparator(false);

            // De-duplicate against data seen from all players.
            for (int idx = 0; idx < thisData.numInstances(); idx++)
            {
                weka.core.Instance candidate = thisData.instance(idx);
                bool isDuplicate = false;
                for (int k = 0; k < allUniqueData.numInstances() && !isDuplicate; k++)
                {
                    if (comparator.compare(candidate, allUniqueData.instance(k)) == 0)
                    {
                        Debug.Log("Duplicate found!");
                        isDuplicate = true;
                    }
                }
                if (isDuplicate)
                    dupInstances++;
                else
                    allUniqueData.add(candidate);
            }

            // De-duplicate within this player's own data.
            for (int idx = 0; idx < thisData.numInstances(); idx++)
            {
                weka.core.Instance candidate = thisData.instance(idx);
                bool isDuplicate = false;
                for (int k = 0; k < thisUniqueData.numInstances() && !isDuplicate; k++)
                {
                    if (comparator.compare(candidate, thisUniqueData.instance(k)) == 0)
                    {
                        Debug.Log("Duplicate found!");
                        isDuplicate = true;
                    }
                }
                if (isDuplicate)
                    dupInstancesSamePlayer++;
                else
                    thisUniqueData.add(candidate);
            }
        }
        catch (java.lang.Exception ex)
        {
            Debug.LogError(ex.getMessage());
        }
    }
Ejemplo n.º 24
0
        /// <summary>
        /// Binary-relevance style evaluation: for each class 1..baseClasses, loads
        /// "{classifierFileName}_{class}.arff", runs 10-fold cross-validation with the
        /// supplied classifier, and records per-instance correctness. An instance
        /// counts as correct overall only if every per-class run classified it
        /// correctly; the resulting percentage is returned.
        /// </summary>
        /// <param name="classifierFileName">base path of the per-class ARFF files</param>
        /// <param name="baseClasses">number of per-class datasets (1-based suffix)</param>
        /// <param name="_classifier">classifier instance; rebuilt in place for every fold</param>
        /// <returns>overall accuracy percentage, or 0.0 if an exception occurred</returns>
        public static double classifyCrossFold_Train_Test_onlySelectedClass(string classifierFileName, int baseClasses, Classifier _classifier)
        {
            double performance = 0.0;

            try
            {
                List <BrResult> results = new List <BrResult>();
                for (int singleClass = 1; singleClass <= baseClasses; singleClass++)
                {
                    string eachFileName = String.Format("{0}_{1}.arff", classifierFileName, singleClass);

                    BrResult result = new BrResult();
                    result.classNumber = singleClass;

                    // Load this class's dataset; the class attribute is the last column.
                    FileReader          javaFileReader = new FileReader(eachFileName);
                    weka.core.Instances insts          = new weka.core.Instances(javaFileReader);
                    javaFileReader.close();

                    insts.setClassIndex(insts.numAttributes() - 1);


                    List <Result> eachResults = new List <Result>();

                    // NOTE(review): foldsInstances is computed but never used.
                    var       totalnstances  = insts.numInstances();
                    var       foldsInstances = totalnstances / 10;
                    Instances foldsData      = new Instances(insts);
                    var       folds          = 10;
                    int       numCorrect     = 0;
                    int       dataIndex      = 0;
                    for (int n = 0; n < folds; n++)
                    {
                        System.Console.WriteLine("Performing " + n + " folds");

                        Instances trainFold         = foldsData.trainCV(folds, n);
                        var       numnerOfTrainInst = trainFold.numInstances();

                        Instances testFold         = foldsData.testCV(folds, n);
                        var       numnerOfTestInst = testFold.numInstances();


                        // Re-train the shared classifier on this fold's training split.
                        _classifier.buildClassifier(trainFold);

                        //List<Result> eachResults = new List<Result>();
                        for (int test = 0; test < numnerOfTestInst; test++)
                        {
                            dataIndex++;
                            Result eachRow = new Result();
                            eachRow.lineIndex = 0;
                            weka.core.Instance currentInst = testFold.instance(test);

                            double predictClass = _classifier.classifyInstance(currentInst);
                            //double[] dist = _classifier.distributionForInstance(currentInst);

                            // Compare predicted vs actual nominal label for this instance.
                            string actualClass    = testFold.classAttribute().value((int)testFold.instance(test).classValue());
                            string predictedClass = testFold.classAttribute().value((int)predictClass);

                            //var abcd = _classifier.getClass();

                            if (predictedClass == actualClass)
                            {
                                eachRow.correct = "1";
                                numCorrect++;
                            }
                            else
                            {
                                eachRow.correct = "0";
                            }
                            eachRow.lineIndex      = dataIndex;
                            eachRow.classActual    = actualClass;
                            eachRow.classPredicted = predictedClass;

                            eachResults.Add(eachRow);
                        }
                    }
                    result.classResult = eachResults;
                    results.Add(result);
                    //System.Console.WriteLine(numCorrect + " out of " + testSize + " correct (" + (double)((double)numCorrect / (double)testSize * 100.0) + "%)");
                }

                #region Evaludation Matrix
                // Build lineIndex -> comma-separated per-class correctness flags.
                // NOTE(review): the lowercase .toString() calls suggest these values are
                // java.lang.String via IKVM — confirm before refactoring to ToString().
                var evaluationMatrix = new Dictionary <int, string>();

                foreach (var res in results)
                {
                    foreach (var classRes in res.classResult)
                    {
                        if (!evaluationMatrix.Keys.Contains(classRes.lineIndex))
                        {
                            evaluationMatrix[classRes.lineIndex] = classRes.correct.toString();
                        }
                        else
                        {
                            evaluationMatrix[classRes.lineIndex] = evaluationMatrix[classRes.lineIndex].toString() + "," + classRes.correct.toString();
                        }
                    }
                }
                #endregion

                #region
                // An instance is correct overall only if no per-class run flagged "0".
                int correnctlyClassified   = 0;
                int incorrenctlyClassified = 0;
                int totalData = evaluationMatrix.Count;
                foreach (var key in evaluationMatrix.Keys)
                {
                    string   multiLevelClass = evaluationMatrix[key].ToString();
                    string[] a = multiLevelClass.Split(',');

                    int classPredect = 0;
                    for (int i = 0; i < a.Length; i++)
                    {
                        if (a[i] == "0")
                        {
                            classPredect++;
                        }
                    }
                    if (classPredect == 0)
                    {
                        correnctlyClassified++;
                    }
                    else if (classPredect > 0)
                    {
                        incorrenctlyClassified++;
                    }
                }

                performance = (double)((double)correnctlyClassified / (double)totalData) * 100;
                System.Console.WriteLine(performance);
                #endregion
            }
            catch (java.lang.Exception ex)
            {
                ex.printStackTrace();
            }
            return(performance);
        }
Ejemplo n.º 25
0
        /// <summary>
        /// Create an instances structure with classes for supervised methods
        /// </summary>
        /// <param name="InfoClass">class info providing the class count and the correspondence table</param>
        /// <param name="NeutralClass">class index to exclude from the dataset</param>
        /// <returns>Weka dataset built from the active, non-neutral wells</returns>
        public Instances CreateInstancesWithClasses(cInfoClass InfoClass, int NeutralClass)
        {
            // One numeric attribute per active descriptor.
            weka.core.FastVector attributes = new FastVector();
            int activeDescriptorCount = 0;
            for (int descIdx = 0; descIdx < ParentScreening.ListDescriptors.Count; descIdx++)
            {
                if (!ParentScreening.ListDescriptors[descIdx].IsActive()) continue;
                attributes.addElement(new weka.core.Attribute(ParentScreening.ListDescriptors[descIdx].GetName()));
                activeDescriptorCount++;
            }

            // Nominal class attribute "Class__" with values "Class__0" .. "Class__{N-1}".
            weka.core.FastVector classValues = new FastVector();
            for (int classIdx = 0; classIdx < InfoClass.NumberOfClass; classIdx++)
                classValues.addElement("Class__" + classIdx.ToString());
            attributes.addElement(new weka.core.Attribute("Class__", classValues));

            Instances dataset = new Instances("MyRelation", attributes, 0);
            int wellCount = 0;
            foreach (cWell well in this.ListActiveWells)
            {
                // Skip wells belonging to the neutral class.
                if (well.GetCurrentClassIdx() == NeutralClass) continue;

                double[] values = new double[dataset.numAttributes()];
                int valueIdx = 0;
                for (int col = 0; col < ParentScreening.ListDescriptors.Count; col++)
                {
                    if (!ParentScreening.ListDescriptors[col].IsActive()) continue;
                    values[valueIdx++] = well.ListSignatures[col].GetValue();
                }
                // Last slot holds the remapped class index.
                values[activeDescriptorCount] = InfoClass.CorrespondanceTable[well.GetCurrentClassIdx()];

                dataset.add(new DenseInstance(1.0, values));
                wellCount++;
            }
            dataset.setClassIndex(dataset.numAttributes() - 1);

            return dataset;
        }
Ejemplo n.º 26
0
        /// <summary>
        /// Create an instances structure with classes for supervised methods
        /// </summary>
        /// <param name="ListClassSelected">per-class flags; wells whose class is not selected are skipped</param>
        /// <returns>Weka dataset built from the selected active wells</returns>
        public Instances CreateInstancesWithClasses(List<bool> ListClassSelected)
        {
            // One numeric attribute per active descriptor.
            weka.core.FastVector attributes = new FastVector();
            int activeDescriptorCount = 0;
            for (int descIdx = 0; descIdx < ParentScreening.ListDescriptors.Count; descIdx++)
            {
                if (!ParentScreening.ListDescriptors[descIdx].IsActive()) continue;
                attributes.addElement(new weka.core.Attribute(ParentScreening.ListDescriptors[descIdx].GetName()));
                activeDescriptorCount++;
            }

            // Nominal class attribute named after the globally declared well classes.
            weka.core.FastVector classValues = new FastVector();
            foreach (var wellClass in cGlobalInfo.ListWellClasses)
            {
                classValues.addElement(wellClass.Name);
            }
            attributes.addElement(new weka.core.Attribute("ClassAttribute", classValues));

            Instances dataset = new Instances("MyRelation", attributes, 0);
            int wellCount = 0;
            foreach (cWell well in this.ListActiveWells)
            {
                // Keep only wells whose class is selected.
                if (!ListClassSelected[well.GetCurrentClassIdx()]) continue;

                double[] values = new double[dataset.numAttributes()];
                int valueIdx = 0;
                for (int col = 0; col < ParentScreening.ListDescriptors.Count; col++)
                {
                    if (!ParentScreening.ListDescriptors[col].IsActive()) continue;
                    values[valueIdx++] = well.ListSignatures[col].GetValue();
                }
                // Last slot is the raw class index.
                values[activeDescriptorCount] = well.GetCurrentClassIdx();

                dataset.add(new DenseInstance(1.0, values));
                wellCount++;
            }
            dataset.setClassIndex(dataset.numAttributes() - 1);

            return dataset;
        }
Ejemplo n.º 27
0
 /// <summary>
 /// Builds the "why" Instances set from the secondary why candidates.
 /// Candidates with a null value are skipped; the class index is the
 /// last element of the feature vector.
 /// </summary>
 private Instances createWhyInstances()
 {
     FastVector featureVector = createWhyFastVector();
     Instances result = new Instances("WhyInstances", featureVector, listSecondaryWhyCandidates.Count);

     foreach (Token token in listSecondaryWhyCandidates)
     {
         if (token.Value == null)
         {
             continue;
         }

         Instance inst = createSingleWhyInstance(featureVector, token);
         inst.setDataset(result);
         result.add(inst);
     }

     result.setClassIndex(featureVector.size() - 1);
     return result;
 }
Ejemplo n.º 28
0
        // ---- OPERATIONS ----

        /// <summary>
        /// Analyze the time series data. The similarity matrices are created
        /// and filled with euclidean distances based on the tolerance values
        /// for similarity.
        /// </summary>
        /// <param name="data">data to be analyzed; the last attribute is used as the class</param>
        public override void analyze(Instances data)
        {
            data.setClassIndex(data.numAttributes() - 1);

            m_data = data;
            m_rangeTemplates.setUpper(data.numAttributes());

            // compute the Fourier transform of every attribute
            FourierTransform dftFilter = new FourierTransform();
            dftFilter.setInputFormat(data);
            dftFilter.setNumCoeffs(getNumCoeffs());
            dftFilter.setUseFFT(getUseFFT());
            Instances fourierdata = Filter.useFilter(data, dftFilter);

            // allocate square similarity matrices (rectangular-array helper
            // reproduces Java's automatic double[numdim][numdim] initialization)
            int numdim = data.numAttributes();
            m_distancesFreq = RectangularArrays.ReturnRectangularDoubleArray(numdim, numdim);
            m_distancesTime = RectangularArrays.ReturnRectangularDoubleArray(numdim, numdim);

            // fill the lower triangle (j < i) of the similarity matrices
            for (int i = 0; i < data.numAttributes(); ++i)
            {
                for (int j = 0; j < i; j++)
                {
                    // not for template sequences
                    if (m_rangeTemplates.isInRange(i) && m_rangeTemplates.isInRange(j))
                    {
                        continue;
                    }

                    // Euclidean distance between dims i and j in the frequency
                    // domain; the filtered data stores attribute k's coefficients
                    // as (2*k real, 2*k+1 imaginary) — TODO confirm against
                    // FourierTransform's output layout
                    double[] reCT = fourierdata.attributeToDoubleArray(2 * i);
                    double[] imCT = fourierdata.attributeToDoubleArray(2 * i + 1);

                    double[] reCS = fourierdata.attributeToDoubleArray(2 * j);
                    double[] imCS = fourierdata.attributeToDoubleArray(2 * j + 1);

                    m_distancesFreq[i][j] = computeEuclidean(reCT, imCT, reCS, imCS);

                    // only when the dims look similar in the frequency domain,
                    // compute the full time-domain Euclidean distance
                    if (m_distancesFreq[i][j] <= m_epsilon)
                    {
                        double[] x = data.attributeToDoubleArray(i);
                        double[] y = data.attributeToDoubleArray(j);

                        m_distancesTime[i][j] = computeEuclidean(x, y);
                    }
                }
            }
        }
Ejemplo n.º 29
0
        /// <summary>
        /// Create a single instance for WEKA
        /// </summary>
        /// <param name="InfoClass">class information (number of classes)</param>
        /// <returns>the weka instances containing one instance built from the active descriptors' averages</returns>
        public Instances CreateInstanceForNClasses(cInfoClass InfoClass)
        {
            // average value of each active descriptor
            List<double> averages = new List<double>();
            for (int descIdx = 0; descIdx < Parent.ListDescriptors.Count; descIdx++)
            {
                if (Parent.ListDescriptors[descIdx].IsActive())
                    averages.Add(GetAverageValuesList()[descIdx]);
            }

            // one numeric attribute per active descriptor
            weka.core.FastVector atts = new FastVector();
            List<string> activeNames = Parent.ListDescriptors.GetListNameActives();
            foreach (string descriptorName in activeNames)
                atts.addElement(new weka.core.Attribute(descriptorName));

            // nominal class attribute, one label per class
            weka.core.FastVector attVals = new FastVector();
            for (int classIdx = 0; classIdx < InfoClass.NumberOfClass; classIdx++)
                attVals.addElement("Class" + classIdx);

            atts.addElement(new weka.core.Attribute("Class__", attVals));

            Instances data1 = new Instances("SingleInstance", atts, 0);

            // values: the averages followed by the class slot
            // NOTE(review): the class slot is left at 0 (first class) — presumably
            // intentional for an unclassified instance; confirm with callers
            double[] values = new double[averages.Count + 1];
            averages.CopyTo(values, 0);

            data1.add(new DenseInstance(1.0, values));
            data1.setClassIndex(data1.numAttributes() - 1);
            return data1;
        }
Ejemplo n.º 30
0
        /// <summary>
        /// Build the learning model for classification
        /// </summary>
        /// <param name="WindowForClassificationParam">dialog providing the selected algorithm and its parameters</param>
        /// <param name="InstancesList">list of instances; a nominal "Class" attribute is appended in place</param>
        /// <param name="TextBoxForFeedback">Text box for the results (can be NULL)</param>
        /// <param name="PanelForVisualFeedback">Panel to display visual results if avalaible (can be NULL)</param>
        /// <param name="ModelEvaluation">cross-validation result, or null when validation is disabled</param>
        /// <param name="IsCellular">true: labels come from the cellular phenotypes, false: from the well classes</param>
        /// <returns>the trained classifier, or null when no algorithm was selected</returns>
        public Classifier PerformTraining(FormForClassificationInfo WindowForClassificationParam, Instances InstancesList, /*int NumberofClusters,*/ RichTextBox TextBoxForFeedback,
                                            Panel PanelForVisualFeedback, out weka.classifiers.Evaluation ModelEvaluation, bool IsCellular)
        {
            ModelEvaluation = null;

            cParamAlgo ClassifAlgoParams = WindowForClassificationParam.GetSelectedAlgoAndParameters();
            if (ClassifAlgoParams == null) return null;

            cListValuesParam Parameters = ClassifAlgoParams.GetListValuesParam();

            // -------------------------- Classification -------------------------------
            // append the nominal class attribute and assign each instance its label
            this.attValsWithoutClasses = new FastVector();

            if (IsCellular)
                for (int i = 0; i < cGlobalInfo.ListCellularPhenotypes.Count; i++)
                    this.attValsWithoutClasses.addElement(cGlobalInfo.ListCellularPhenotypes[i].Name);
            else
                for (int i = 0; i < cGlobalInfo.ListWellClasses.Count; i++)
                    this.attValsWithoutClasses.addElement(cGlobalInfo.ListWellClasses[i].Name);

            InstancesList.insertAttributeAt(new weka.core.Attribute("Class", this.attValsWithoutClasses), InstancesList.numAttributes());
            for (int i = 0; i < Classes.Count; i++)
                InstancesList.get(i).setValue(InstancesList.numAttributes() - 1, Classes[i]);

            InstancesList.setClassIndex(InstancesList.numAttributes() - 1);

            weka.core.Instances train = new weka.core.Instances(InstancesList, 0, InstancesList.numInstances());

            if (PanelForVisualFeedback != null)
                PanelForVisualFeedback.Controls.Clear();

            #region List classifiers

            #region J48
            if (ClassifAlgoParams.Name == "J48")
            {
                this.CurrentClassifier = new weka.classifiers.trees.J48();
                ((J48)this.CurrentClassifier).setMinNumObj((int)Parameters.ListDoubleValues.Get("numericUpDownMinInstLeaf").Value);
                ((J48)this.CurrentClassifier).setConfidenceFactor((float)Parameters.ListDoubleValues.Get("numericUpDownConfFactor").Value);
                ((J48)this.CurrentClassifier).setNumFolds((int)Parameters.ListDoubleValues.Get("numericUpDownNumFolds").Value);
                ((J48)this.CurrentClassifier).setUnpruned((bool)Parameters.ListCheckValues.Get("checkBoxUnPruned").Value);
                ((J48)this.CurrentClassifier).setUseLaplace((bool)Parameters.ListCheckValues.Get("checkBoxLaplacianSmoothing").Value);
                ((J48)this.CurrentClassifier).setSeed((int)Parameters.ListDoubleValues.Get("numericUpDownSeedNumber").Value);
                ((J48)this.CurrentClassifier).setSubtreeRaising((bool)Parameters.ListCheckValues.Get("checkBoxSubTreeRaising").Value);

                this.CurrentClassifier.buildClassifier(train);

                // display the decision tree when a feedback panel is available
                if (PanelForVisualFeedback != null)
                {
                    GViewer GraphView = DisplayTree(GlobalInfo, ((J48)this.CurrentClassifier), IsCellular).gViewerForTreeClassif;
                    GraphView.Size = new System.Drawing.Size(PanelForVisualFeedback.Width, PanelForVisualFeedback.Height);
                    GraphView.Anchor = (AnchorStyles.Bottom | AnchorStyles.Top | AnchorStyles.Left | AnchorStyles.Right);
                    PanelForVisualFeedback.Controls.Clear();
                    PanelForVisualFeedback.Controls.Add(GraphView);
                }
            }
            #endregion
            #region Random Tree
            else if (ClassifAlgoParams.Name == "RandomTree")
            {
                this.CurrentClassifier = new weka.classifiers.trees.RandomTree();

                // max depth 0 means "unlimited" in weka
                if ((bool)Parameters.ListCheckValues.Get("checkBoxMaxDepthUnlimited").Value)
                    ((RandomTree)this.CurrentClassifier).setMaxDepth(0);
                else
                    ((RandomTree)this.CurrentClassifier).setMaxDepth((int)Parameters.ListDoubleValues.Get("numericUpDownMaxDepth").Value);
                ((RandomTree)this.CurrentClassifier).setSeed((int)Parameters.ListDoubleValues.Get("numericUpDownSeed").Value);
                ((RandomTree)this.CurrentClassifier).setMinNum((double)Parameters.ListDoubleValues.Get("numericUpDownMinWeight").Value);

                // numFolds 0 disables backfitting
                if ((bool)Parameters.ListCheckValues.Get("checkBoxIsBackfitting").Value)
                {
                    ((RandomTree)this.CurrentClassifier).setNumFolds((int)Parameters.ListDoubleValues.Get("numericUpDownBackFittingFolds").Value);
                }
                else
                {
                    ((RandomTree)this.CurrentClassifier).setNumFolds(0);
                }
                this.CurrentClassifier.buildClassifier(train);
            }
            #endregion
            #region Random Forest
            else if (ClassifAlgoParams.Name == "RandomForest")
            {
                this.CurrentClassifier = new weka.classifiers.trees.RandomForest();

                // max depth 0 means "unlimited" in weka
                if ((bool)Parameters.ListCheckValues.Get("checkBoxMaxDepthUnlimited").Value)
                    ((RandomForest)this.CurrentClassifier).setMaxDepth(0);
                else
                    ((RandomForest)this.CurrentClassifier).setMaxDepth((int)Parameters.ListDoubleValues.Get("numericUpDownMaxDepth").Value);

                ((RandomForest)this.CurrentClassifier).setNumTrees((int)Parameters.ListDoubleValues.Get("numericUpDownNumTrees").Value);
                ((RandomForest)this.CurrentClassifier).setSeed((int)Parameters.ListDoubleValues.Get("numericUpDownSeed").Value);

                this.CurrentClassifier.buildClassifier(train);
            }
            #endregion
            #region KStar
            else if (ClassifAlgoParams.Name == "KStar")
            {
                this.CurrentClassifier = new weka.classifiers.lazy.KStar();
                ((KStar)this.CurrentClassifier).setGlobalBlend((int)Parameters.ListDoubleValues.Get("numericUpDownGlobalBlend").Value);
                ((KStar)this.CurrentClassifier).setEntropicAutoBlend((bool)Parameters.ListCheckValues.Get("checkBoxBlendAuto").Value);
                this.CurrentClassifier.buildClassifier(train);
            }
            #endregion
            #region SVM
            else if (ClassifAlgoParams.Name == "SVM")
            {
                this.CurrentClassifier = new weka.classifiers.functions.SMO();
                ((SMO)this.CurrentClassifier).setC((double)Parameters.ListDoubleValues.Get("numericUpDownC").Value);
                ((SMO)this.CurrentClassifier).setKernel(WindowForClassificationParam.GeneratedKernel);
                ((SMO)this.CurrentClassifier).setRandomSeed((int)Parameters.ListDoubleValues.Get("numericUpDownSeed").Value);
                this.CurrentClassifier.buildClassifier(train);
            }
            #endregion
            #region KNN
            else if (ClassifAlgoParams.Name == "KNN")
            {
                this.CurrentClassifier = new weka.classifiers.lazy.IBk();

                // build the IBk option string: neighbours, distance weighting,
                // then the nearest-neighbour search with the chosen distance
                string OptionDistance = " -K " + (int)Parameters.ListDoubleValues.Get("numericUpDownKNN").Value + " -W 0 ";

                string WeightType = (string)Parameters.ListTextValues.Get("comboBoxDistanceWeight").Value;
                switch (WeightType)
                {
                    case "No Weighting":
                        OptionDistance += "";
                        break;
                    case "1/Distance":
                        OptionDistance += "-I";
                        break;
                    case "1-Distance":
                        OptionDistance += "-F";
                        break;
                    default:
                        break;
                }
                OptionDistance += " -A \"weka.core.neighboursearch.LinearNNSearch -A \\\"weka.core.";

                string DistanceType = (string)Parameters.ListTextValues.Get("comboBoxDistance").Value;
                switch (DistanceType)
                {
                    case "Euclidean":
                        OptionDistance += "EuclideanDistance";
                        break;
                    case "Manhattan":
                        OptionDistance += "ManhattanDistance";
                        break;
                    case "Chebyshev":
                        OptionDistance += "ChebyshevDistance";
                        break;
                    default:
                        break;
                }

                // -D turns normalization OFF for weka distance functions
                if (!(bool)Parameters.ListCheckValues.Get("checkBoxNormalize").Value)
                    OptionDistance += " -D";
                OptionDistance += " -R ";

                OptionDistance += "first-last\\\"\"";
                ((IBk)this.CurrentClassifier).setOptions(weka.core.Utils.splitOptions(OptionDistance));

                this.CurrentClassifier.buildClassifier(train);
            }
            #endregion
            #region Multilayer Perceptron
            else if (ClassifAlgoParams.Name == "Perceptron")
            {
                this.CurrentClassifier = new weka.classifiers.functions.MultilayerPerceptron();
                ((MultilayerPerceptron)this.CurrentClassifier).setMomentum((double)Parameters.ListDoubleValues.Get("numericUpDownMomentum").Value);
                ((MultilayerPerceptron)this.CurrentClassifier).setLearningRate((double)Parameters.ListDoubleValues.Get("numericUpDownLearningRate").Value);
                ((MultilayerPerceptron)this.CurrentClassifier).setSeed((int)Parameters.ListDoubleValues.Get("numericUpDownSeed").Value);
                ((MultilayerPerceptron)this.CurrentClassifier).setTrainingTime((int)Parameters.ListDoubleValues.Get("numericUpDownTrainingTime").Value);
                ((MultilayerPerceptron)this.CurrentClassifier).setNormalizeAttributes((bool)Parameters.ListCheckValues.Get("checkBoxNormAttribute").Value);
                ((MultilayerPerceptron)this.CurrentClassifier).setNormalizeNumericClass((bool)Parameters.ListCheckValues.Get("checkBoxNormNumericClasses").Value);
                this.CurrentClassifier.buildClassifier(train);
            }
            #endregion
            #region ZeroR
            else if (ClassifAlgoParams.Name == "ZeroR")
            {
                // fixed: this branch previously instantiated OneR by mistake
                this.CurrentClassifier = new weka.classifiers.rules.ZeroR();
                this.CurrentClassifier.buildClassifier(train);
            }
            #endregion
            #region OneR
            else if (ClassifAlgoParams.Name == "OneR")
            {
                this.CurrentClassifier = new weka.classifiers.rules.OneR();
                ((OneR)this.CurrentClassifier).setMinBucketSize((int)Parameters.ListDoubleValues.Get("numericUpDownMinBucketSize").Value);
                this.CurrentClassifier.buildClassifier(train);
            }
            #endregion
            #region Naive Bayes
            else if (ClassifAlgoParams.Name == "NaiveBayes")
            {
                this.CurrentClassifier = new weka.classifiers.bayes.NaiveBayes();
                ((NaiveBayes)this.CurrentClassifier).setUseKernelEstimator((bool)Parameters.ListCheckValues.Get("checkBoxKernelEstimator").Value);
                this.CurrentClassifier.buildClassifier(train);
            }
            #endregion
            #region Logistic
            else if (ClassifAlgoParams.Name == "Logistic")
            {
                this.CurrentClassifier = new weka.classifiers.functions.Logistic();
                ((Logistic)this.CurrentClassifier).setUseConjugateGradientDescent((bool)Parameters.ListCheckValues.Get("checkBoxUseConjugateGradientDescent").Value);
                ((Logistic)this.CurrentClassifier).setRidge((double)Parameters.ListDoubleValues.Get("numericUpDownRidge").Value);
                this.CurrentClassifier.buildClassifier(train);
            }
            #endregion

            #endregion

            // all feedback output is guarded: TextBoxForFeedback may be null
            // (fixed: the attribute listing previously dereferenced it unconditionally)
            if (TextBoxForFeedback != null)
            {
                TextBoxForFeedback.Clear();
                TextBoxForFeedback.AppendText(this.CurrentClassifier.ToString());

                TextBoxForFeedback.AppendText("\n" + (InstancesList.numAttributes() - 1) + " attributes:\n\n");
                for (int IdxAttributes = 0; IdxAttributes < InstancesList.numAttributes() - 1; IdxAttributes++)
                {
                    TextBoxForFeedback.AppendText(IdxAttributes + "\t: " + InstancesList.attribute(IdxAttributes).name() + "\n");
                }
            }

            #region evaluation of the model and results display

            if ((WindowForClassificationParam.numericUpDownFoldNumber.Enabled) && (TextBoxForFeedback != null))
            {
                TextBoxForFeedback.AppendText("\n-----------------------------\nModel validation\n-----------------------------\n");
                ModelEvaluation = new weka.classifiers.Evaluation(InstancesList);
                ModelEvaluation.crossValidateModel(this.CurrentClassifier, InstancesList, (int)WindowForClassificationParam.numericUpDownFoldNumber.Value, new java.util.Random(1));
                TextBoxForFeedback.AppendText(ModelEvaluation.toSummaryString());
                TextBoxForFeedback.AppendText("\n-----------------------------\nConfusion Matrix:\n-----------------------------\n");
                double[][] ConfusionMatrix = ModelEvaluation.confusionMatrix();

                string NewLine = "";
                for (int i = 0; i < ConfusionMatrix[0].Length; i++)
                {
                    NewLine += "c" + i + "\t";
                }
                TextBoxForFeedback.AppendText(NewLine + "\n\n");

                for (int j = 0; j < ConfusionMatrix.Length; j++)
                {
                    NewLine = "";
                    for (int i = 0; i < ConfusionMatrix[0].Length; i++)
                    {
                        NewLine += ConfusionMatrix[j][i] + "\t";
                    }
                    // fixed: row labels now follow IsCellular, consistent with the
                    // class-attribute construction above (previously always used
                    // the cellular phenotype list)
                    string RowLabel = IsCellular
                        ? cGlobalInfo.ListCellularPhenotypes[j].Name
                        : cGlobalInfo.ListWellClasses[j].Name;
                    TextBoxForFeedback.AppendText(NewLine + "| c" + j + " <=> " + RowLabel + "\n");
                }
            }
            #endregion

            return this.CurrentClassifier;
        }
Ejemplo n.º 31
-1
        /// <summary>
        /// Loads an ARFF data set, trains a J48 tree on the first percentSplit%
        /// of the instances, then reports a 10-fold cross-validation of the
        /// classifier on the whole set (crossValidateModel rebuilds the model
        /// internally for each fold).
        /// </summary>
        /// <param name="classifierFileName">path of the ARFF file to evaluate</param>
        public static void Test_predictClass(string classifierFileName)
        {
            FileReader javaFileReader = new FileReader(classifierFileName);

            weka.core.Instances insts = new weka.core.Instances(javaFileReader);
            javaFileReader.close();

            // last attribute is the class
            insts.setClassIndex(insts.numAttributes() - 1);

            weka.classifiers.Classifier cl = new weka.classifiers.trees.J48();
            System.Console.WriteLine("Performing " + percentSplit + "% split evaluation.");

            // train on the first percentSplit% of the instances
            int trainSize = insts.numInstances() * percentSplit / 100;
            weka.core.Instances train = new weka.core.Instances(insts, 0, trainSize);

            cl.buildClassifier(train);

            // 10-fold cross-validation with a fixed seed for reproducibility
            Evaluation eval = new Evaluation(insts);
            java.util.Random rand = new java.util.Random(1);  // using seed = 1
            int fold = 10;
            eval.crossValidateModel(cl, insts, fold, rand);
            System.Console.WriteLine("toClassDetailsString" + eval.toClassDetailsString());
            System.Console.WriteLine("toMatrixString\n" + eval.toMatrixString());
            System.Console.WriteLine("toCumulativeMarginDistributionString\n" + eval.toCumulativeMarginDistributionString());
            System.Console.ReadKey();
        }