예제 #1
0
        /// <summary>
        /// Creates a classifier of the desired type and trains it on the data in an .arff file.
        /// The trained classifier is kept in the _classifier field and the data in _dataSet.
        /// </summary>
        /// <param name="ARFFfile">The arff file to read from. Should be a full path.</param>
        /// <param name="myClassifier">The type of classifier you want to make.</param>
        public void createModel(string ARFFfile, Classifier myClassifier)
        {
            if (debug)
            {
                Console.WriteLine("Loading ARFF file " + ARFFfile);
            }

            _classifier = GetClassifier(myClassifier);
            try
            {
                // NOTE(review): the FileReader is never closed — confirm whether the
                // IKVM reader needs an explicit close() to release the file handle.
                _dataSet = new weka.core.Instances(new java.io.FileReader(ARFFfile));
                if (debug)
                {
                    Console.WriteLine("You have " + _dataSet.numAttributes() + " attributes.");
                }
                // Weka convention: the last attribute is the class attribute.
                _dataSet.setClassIndex(_dataSet.numAttributes() - 1);

                _classifier.buildClassifier(_dataSet);

                if (debug)
                {
                    Console.WriteLine(_classifier.toString());
                }
            }
            catch (Exception e)
            {
                // Any failure (missing file, malformed ARFF, training error) is logged and swallowed.
                Console.WriteLine("You failed. End of Game. Poor Weka.");
                Console.WriteLine(e);
            }
        }
    /// <summary>
    /// Appends a newly rated map to the player's data set and rebuilds the classifier.
    /// </summary>
    /// <param name="newMapData">Attribute values describing the map just played.</param>
    /// <param name="lastRating">The player's rating; drives both the class value and the instance weight.</param>
    public void UpdateClassifier(double[] newMapData, int lastRating)
    {
        try
        {
            // Full instance = map attributes followed by the class value in the last slot.
            double[] fullData = new double[newMapData.Length + 1];
            System.Array.Copy(newMapData, fullData, newMapData.Length);
            //*********fullData[fullData.Length-1] = (double) lastRating;
            // Integer division is intentional: ratings 1-3 map to class 0, ratings 4-6 to class 1.
            fullData[fullData.Length - 1] = (double)((lastRating - 1) / 3);
            //Debug.LogWarning(fullData[fullData.Length-1]);

            // Extreme ratings carry more weight than lukewarm ones.
            double weight;
            switch (lastRating)
            {
                case 1:
                case 6:
                    weight = 2;
                    break;
                case 2:
                case 5:
                    weight = 1;
                    break;
                default:
                    weight = 0.5;
                    break;
            }

            // Naive Bayes defaults all data to weight of 1, do same for this instance
            //*******weka.core.Instance newInstance = new weka.core.Instance(1,fullData);
            playerData.add(new weka.core.Instance(weight, fullData));

            // This version of Naive Bayes is not updateable, so just rebuild the classifier
            // Updateable version has slightly lower accuracy
            classifier.buildClassifier(playerData);
        }
        catch (java.lang.Exception ex)
        {
            Debug.LogError(ex.getMessage());
        }
    }
        /// <summary>
        /// Trains an SMO support-vector machine on a percentage split of the given
        /// instances and returns the accuracy (%) on the held-out remainder.
        /// </summary>
        /// <param name="insts">The data set; its last attribute is used as the class.</param>
        /// <returns>Percentage of correctly classified test instances, or 0 on error
        /// or when the split leaves no test instances.</returns>
        public static double SupportVectorMachineTest(weka.core.Instances insts)
        {
            try
            {
                // Weka convention: the class attribute is the last one.
                insts.setClassIndex(insts.numAttributes() - 1);

                SupportVectorMachine = new weka.classifiers.functions.SMO();

                // SMO needs numeric inputs: expand nominal attributes to binary indicators.
                weka.filters.Filter myDummy = new weka.filters.unsupervised.attribute.NominalToBinary();
                myDummy.setInputFormat(insts);
                insts = weka.filters.Filter.useFilter(insts, myDummy);

                // Scale attribute values to a common range.
                weka.filters.Filter myNormalize = new weka.filters.unsupervised.instance.Normalize();
                myNormalize.setInputFormat(insts);
                insts = weka.filters.Filter.useFilter(insts, myNormalize);

                // Shuffle so the percentage split is not biased by file order.
                weka.filters.Filter myRandom = new weka.filters.unsupervised.instance.Randomize();
                myRandom.setInputFormat(insts);
                insts = weka.filters.Filter.useFilter(insts, myRandom);

                int trainSize = insts.numInstances() * percentSplit / 100;
                int testSize  = insts.numInstances() - trainSize;
                if (testSize == 0)
                {
                    // FIX: with no held-out instances the original division produced
                    // 0.0 / 0.0 == NaN; report 0 instead, matching the error path.
                    return 0;
                }
                weka.core.Instances train = new weka.core.Instances(insts, 0, trainSize);

                SupportVectorMachine.buildClassifier(train);

                int numCorrect = 0;
                for (int i = trainSize; i < insts.numInstances(); i++)
                {
                    weka.core.Instance currentInst    = insts.instance(i);
                    double             predictedClass = SupportVectorMachine.classifyInstance(currentInst);
                    // classifyInstance returns the predicted class index as a double,
                    // so exact equality against classValue() is safe here.
                    if (predictedClass == insts.instance(i).classValue())
                    {
                        numCorrect++;
                    }
                }
                return((double)numCorrect / (double)testSize * 100.0);
            }
            catch (java.lang.Exception ex)
            {
                ex.printStackTrace();
                return(0);
            }
        }
    /// <summary>
    /// Button handler: loads d:\train.arff and evaluates a Naive Bayes classifier with
    /// stratified 10-fold cross-validation, showing precision, recall and F-measure in the UI.
    /// </summary>
    protected void Button2_Click(object sender, EventArgs e)
    {
        // NOTE(review): the FileReader is never closed and any exception here escapes
        // the event handler — consider a try/catch and an explicit close().
        weka.core.Instances data = new weka.core.Instances(new java.io.FileReader("d:\\train.arff"));
        // Weka convention: the last attribute is the class attribute.
        data.setClassIndex(data.numAttributes() - 1);
        weka.classifiers.Classifier cls = new weka.classifiers.bayes.NaiveBayes();
        // weka.classifiers.functions.supportVector.SMOset();
        int runs  = 1;
        int folds = 10;

        //string sq = "delete from nbresults";
        //dbc.execfn(sq);
        // perform cross-validation
        for (int i = 0; i < runs; i++)
        {
            // randomize data with a per-run seed so each run shuffles differently
            int seed = i + 1;
            java.util.Random    rand     = new java.util.Random(seed);
            weka.core.Instances randData = new weka.core.Instances(data);
            randData.randomize(rand);
            if (randData.classAttribute().isNominal())
            {
                // Keep class proportions roughly equal across folds.
                randData.stratify(folds);
            }
            // weka.classifiers.trees.j48 jj;
            weka.classifiers.Evaluation eval = new weka.classifiers.Evaluation(randData);
            for (int n = 0; n < folds; n++)
            {
                weka.core.Instances train = randData.trainCV(folds, n);
                weka.core.Instances test  = randData.testCV(folds, n);
                // build and evaluate classifier on a fresh copy so folds stay independent
                weka.classifiers.Classifier clsCopy = weka.classifiers.Classifier.makeCopy(cls);
                clsCopy.buildClassifier(train);

                eval.evaluateModel(clsCopy, test);
            }

            // Metrics are reported for class index 0 only.
            // NOTE(review): acc_value receives the F-measure, not accuracy — confirm intent.
            preci_value.Text  = eval.precision(0).ToString();
            recall_value.Text = eval.recall(0).ToString();
            acc_value.Text    = eval.fMeasure(0).ToString();

            string s = "NB";
            //    string str = "insert into evaluation values('" + instid.Text + "','" + courid.Text.ToString() + "','" + preci_value.Text.ToString() + "','" + recall_value.Text.ToString() + "','" + acc_value.Text.ToString() + "','" + s + "' )";
            //  db.execfn(str);
            //  MessageBox.Show("saved");
        }
    }
        /// <summary>
        /// Trains a Naive Bayes classifier on a percentage split of the supplied data
        /// set and returns the accuracy (%) measured on the remaining instances.
        /// </summary>
        /// <param name="insts">The data set; its last attribute is treated as the class.</param>
        /// <returns>Percentage of test instances classified correctly, or 0 on error.</returns>
        public static double NaiveBayesTest(weka.core.Instances insts)
        {
            try
            {
                // The class attribute is the last attribute (Weka convention).
                insts.setClassIndex(insts.numAttributes() - 1);

                NaiveBayescl = new weka.classifiers.bayes.NaiveBayes();

                // Discretize numeric attributes into nominal bins for Naive Bayes.
                weka.filters.Filter discretizer = new weka.filters.unsupervised.attribute.Discretize();
                discretizer.setInputFormat(insts);
                insts = weka.filters.Filter.useFilter(insts, discretizer);

                // Shuffle so the train/test split is not biased by the original order.
                weka.filters.Filter shuffler = new weka.filters.unsupervised.instance.Randomize();
                shuffler.setInputFormat(insts);
                insts = weka.filters.Filter.useFilter(insts, shuffler);

                int total     = insts.numInstances();
                int trainSize = total * percentSplit / 100;
                int testSize  = total - trainSize;

                // Train on the leading portion of the shuffled data.
                NaiveBayescl.buildClassifier(new weka.core.Instances(insts, 0, trainSize));

                // Score the trailing portion.
                int correct = 0;
                for (int index = trainSize; index < total; index++)
                {
                    double predicted = NaiveBayescl.classifyInstance(insts.instance(index));
                    if (predicted == insts.instance(index).classValue())
                    {
                        correct++;
                    }
                }
                return (double)correct / (double)testSize * 100.0;
            }
            catch (java.lang.Exception ex)
            {
                ex.printStackTrace();
                return 0;
            }
        }
예제 #6
0
        //Artificial NN
        /// <summary>
        /// Trains a multilayer perceptron on a percentage split of the data set and
        /// returns the accuracy (%) on the held-out remainder.
        /// </summary>
        /// <param name="insts">The data set; its last attribute is treated as the class.</param>
        /// <returns>Percentage of test instances classified correctly, or 0 on error.</returns>
        public static double ArtificialNN(weka.core.Instances insts)
        {
            try
            {
                // Class attribute is the last one (Weka convention).
                insts.setClassIndex(insts.numAttributes() - 1);

                Anncl = new weka.classifiers.functions.MultilayerPerceptron();

                // The network needs numeric inputs: expand nominals to binary indicators,
                // then scale everything to a common range, then shuffle.
                weka.filters.Filter toBinary = new weka.filters.unsupervised.attribute.NominalToBinary();
                toBinary.setInputFormat(insts);
                insts = weka.filters.Filter.useFilter(insts, toBinary);

                weka.filters.Filter scaler = new weka.filters.unsupervised.instance.Normalize();
                scaler.setInputFormat(insts);
                insts = weka.filters.Filter.useFilter(insts, scaler);

                weka.filters.Filter shuffler = new weka.filters.unsupervised.instance.Randomize();
                shuffler.setInputFormat(insts);
                insts = weka.filters.Filter.useFilter(insts, shuffler);

                int total     = insts.numInstances();
                int trainSize = total * percentSplit / 100;
                int testSize  = total - trainSize;

                // Train on the leading portion of the shuffled data.
                Anncl.buildClassifier(new weka.core.Instances(insts, 0, trainSize));

                // Score the trailing portion.
                int correct = 0;
                for (int index = trainSize; index < total; index++)
                {
                    if (Anncl.classifyInstance(insts.instance(index)) == insts.instance(index).classValue())
                    {
                        correct++;
                    }
                }
                return (double)correct / (double)testSize * 100.0;
            }
            catch (java.lang.Exception ex)
            {
                ex.printStackTrace();
                return 0;
            }
        }
    /* Use when the player logs in to initially create the classifier with data from server */
    /// <summary>
    /// Builds the player's data set from an ARFF-formatted string and trains the
    /// classifier (currently a random forest) on it.
    /// </summary>
    /// <param name="dataString">ARFF content received from the server.</param>
    public void InitializeClassifier(String dataString)
    {
        try
        {
            // Wrap the raw string so Weka can parse it like a file.
            java.io.BufferedReader reader =
                new java.io.BufferedReader(new java.io.StringReader(dataString));

            playerData = new weka.core.Instances(reader);

            /* State where in each Instance the class attribute is, if its not already specified by the file */
            if (playerData.classIndex() == -1)
            {
                playerData.setClassIndex(playerData.numAttributes() - 1);
            }

            /* Alternatives that were tried before settling on a random forest:
             *   weka.classifiers.bayes.NaiveBayes
             *   weka.classifiers.functions.MultilayerPerceptron (setHiddenLayers("12"))
             *   weka.classifiers.trees.J48
             *   weka.classifiers.lazy.IB1
             */

            /* RANDOM FOREST */
            classifier = new weka.classifiers.trees.RandomForest();
            classifier.buildClassifier(playerData);
            Debug.Log("Initialized Classifier");
        }
        catch (java.lang.Exception ex)
        {
            Debug.LogError(ex.getMessage());
        }
    }
예제 #8
0
        //Random Forest
        /// <summary>
        /// Trains a random forest on a percentage split of the data set and returns
        /// the accuracy (%) on the held-out remainder.
        /// </summary>
        /// <param name="insts">The data set; its last attribute is treated as the class.</param>
        /// <returns>Percentage of test instances classified correctly, or 0 on error.</returns>
        public static double RandomForestTest(weka.core.Instances insts)
        {
            try
            {
                // Class attribute is the last one (Weka convention).
                insts.setClassIndex(insts.numAttributes() - 1);

                RandomForestcl = new weka.classifiers.trees.RandomForest();

                // Shuffle so the train/test split is not biased by the original order.
                weka.filters.Filter shuffler = new weka.filters.unsupervised.instance.Randomize();
                shuffler.setInputFormat(insts);
                insts = weka.filters.Filter.useFilter(insts, shuffler);

                int total     = insts.numInstances();
                int trainSize = total * percentSplit / 100;
                int testSize  = total - trainSize;

                // Train on the leading portion of the shuffled data.
                RandomForestcl.buildClassifier(new weka.core.Instances(insts, 0, trainSize));

                // Score the trailing portion.
                int correct = 0;
                for (int index = trainSize; index < total; index++)
                {
                    if (RandomForestcl.classifyInstance(insts.instance(index)) == insts.instance(index).classValue())
                    {
                        correct++;
                    }
                }
                return (double)correct / (double)testSize * 100.0;
            }
            catch (java.lang.Exception ex)
            {
                ex.printStackTrace();
                return 0;
            }
        }
예제 #9
0
파일: Form1.cs 프로젝트: wushian/MLEA
        /// <summary>
        /// Creates a classifier from <paramref name="str"/>, trains it on the current
        /// instances, classifies every cell of the XLEN x YLEN grid and paints the
        /// result onto the picture-box bitmap; optionally logs and evaluates.
        /// </summary>
        /// <param name="str">Classifier specification passed to CreateClassifier.</param>
        private void Train(string str)
        {
            if (string.IsNullOrEmpty(str))
                return;
            m_cls = CreateClassifier(str);
            if (m_cls == null)
            {
                MessageBox.Show("Can't Create Classifier!");
                return;
            }

            var trainInstances = CreateCurrentInstances();
            m_cls.buildClassifier(trainInstances);

            // TEST: classify every (i, j) grid cell and paint the prediction.
            var data = CreateEmptyInstances();
            StringBuilder sb = new StringBuilder();

            if (m_cls is MLEA.IBatchClassifier)
            {
                // Batch path: add all grid instances first, classify them in one call.
                MLEA.IBatchClassifier batchClassifier = m_cls as MLEA.IBatchClassifier;
                for (int i = 0; i < XLEN; i++)
                {
                    for (int j = 0; j < YLEN; j++)
                    {
                        var vals = new double[data.numAttributes()];
                        vals[0] = (double)i / XLEN;
                        vals[1] = (double)j / YLEN;

                        var instance = new weka.core.DenseInstance(1.0, vals);
                        data.add(instance);
                        instance.setDataset(data);
                    }
                }

                double[] ds = batchClassifier.classifyInstances(data);
                for (int i = 0; i < XLEN; i++)
                {
                    for (int j = 0; j < YLEN; j++)
                    {
                        // FIX: instances were added row-major with an inner loop of length
                        // YLEN, so the flat index is i * YLEN + j. The original used
                        // i * XLEN + j, which reads the wrong cell whenever XLEN != YLEN.
                        int flat = i * YLEN + j;
                        double d = ds[flat];

                        if (m_enableLog)
                        {
                            string s = string.Format("{0}, {1}: {2}", data.instance(flat).value(0).ToString("N2"), data.instance(flat).value(1).ToString("N2"), d.ToString("N0"));
                            sb.AppendLine(s);
                        }

                        // Paint the (WXLEN/XLEN) x (WYLEN/YLEN) pixel area of this cell.
                        for (int ii = 0; ii < WXLEN / XLEN; ++ii)
                        {
                            for (int jj = 0; jj < WYLEN / YLEN; ++jj)
                            {
                                m_pictureBoxBitmap.SetPixel(i * WXLEN / XLEN + ii, j * WYLEN / YLEN + jj, GetValueColor((int)d, false));
                            }
                        }
                    }
                }
            }
            else
            {
                // One-at-a-time path: classify each instance as it is created.
                for (int i = 0; i < XLEN; i++)
                {
                    for (int j = 0; j < YLEN; j++)
                    {
                        var vals = new double[data.numAttributes()];
                        vals[0] = (double)i / XLEN;
                        vals[1] = (double)j / YLEN;

                        var instance = new weka.core.DenseInstance(1.0, vals);
                        data.add(instance);
                        instance.setDataset(data);

                        double d = m_cls.classifyInstance(instance);

                        if (m_enableLog)
                        {
                            string s = string.Format("{0}, {1}: {2}", vals[0].ToString("N2"), vals[1].ToString("N2"), d.ToString("N0"));
                            sb.AppendLine(s);
                        }

                        for (int ii = 0; ii < WXLEN / XLEN; ++ii)
                        {
                            for (int jj = 0; jj < WYLEN / YLEN; ++jj)
                            {
                                m_pictureBoxBitmap.SetPixel(i * WXLEN / XLEN + ii, j * WYLEN / YLEN + jj, GetValueColor((int)d, false));
                            }
                        }
                    }
                }
            }
            draw_all_points();

            // Marshal log output onto the UI thread.
            this.Invoke(new Action(() =>
                {
                    if (m_enableLog)
                    {
                        txtLog.AppendText(sb.ToString());
                    }
                }));

            if (m_enableEvaluation)
            {
                Test(trainInstances);
            }
        }
예제 #10
0
        /// <summary>
        /// Creates a classifier from <paramref name="str"/>, trains it on the current
        /// instances, classifies every cell of the XLEN x YLEN grid and paints the
        /// result onto the picture-box bitmap; optionally logs and evaluates.
        /// </summary>
        /// <param name="str">Classifier specification passed to CreateClassifier.</param>
        private void Train(string str)
        {
            if (string.IsNullOrEmpty(str))
            {
                return;
            }
            m_cls = CreateClassifier(str);
            if (m_cls == null)
            {
                MessageBox.Show("Can't Create Classifier!");
                return;
            }

            var trainInstances = CreateCurrentInstances();

            m_cls.buildClassifier(trainInstances);

            // TEST: classify every (i, j) grid cell and paint the prediction.
            var           data = CreateEmptyInstances();
            StringBuilder sb   = new StringBuilder();

            if (m_cls is MLEA.IBatchClassifier)
            {
                // Batch path: add all grid instances first, classify them in one call.
                MLEA.IBatchClassifier batchClassifier = m_cls as MLEA.IBatchClassifier;
                for (int i = 0; i < XLEN; i++)
                {
                    for (int j = 0; j < YLEN; j++)
                    {
                        var vals = new double[data.numAttributes()];
                        vals[0] = (double)i / XLEN;
                        vals[1] = (double)j / YLEN;

                        var instance = new weka.core.DenseInstance(1.0, vals);
                        data.add(instance);
                        instance.setDataset(data);
                    }
                }

                double[] ds = batchClassifier.classifyInstances(data);
                for (int i = 0; i < XLEN; i++)
                {
                    for (int j = 0; j < YLEN; j++)
                    {
                        // FIX: instances were added row-major with an inner loop of length
                        // YLEN, so the flat index is i * YLEN + j. The original used
                        // i * XLEN + j, which reads the wrong cell whenever XLEN != YLEN.
                        int flat = i * YLEN + j;
                        double d = ds[flat];

                        if (m_enableLog)
                        {
                            string s = string.Format("{0}, {1}: {2}", data.instance(flat).value(0).ToString("N2"), data.instance(flat).value(1).ToString("N2"), d.ToString("N0"));
                            sb.AppendLine(s);
                        }

                        // Paint the (WXLEN/XLEN) x (WYLEN/YLEN) pixel area of this cell.
                        for (int ii = 0; ii < WXLEN / XLEN; ++ii)
                        {
                            for (int jj = 0; jj < WYLEN / YLEN; ++jj)
                            {
                                m_pictureBoxBitmap.SetPixel(i * WXLEN / XLEN + ii, j * WYLEN / YLEN + jj, GetValueColor((int)d, false));
                            }
                        }
                    }
                }
            }
            else
            {
                // One-at-a-time path: classify each instance as it is created.
                for (int i = 0; i < XLEN; i++)
                {
                    for (int j = 0; j < YLEN; j++)
                    {
                        var vals = new double[data.numAttributes()];
                        vals[0] = (double)i / XLEN;
                        vals[1] = (double)j / YLEN;

                        var instance = new weka.core.DenseInstance(1.0, vals);
                        data.add(instance);
                        instance.setDataset(data);

                        double d = m_cls.classifyInstance(instance);

                        if (m_enableLog)
                        {
                            string s = string.Format("{0}, {1}: {2}", vals[0].ToString("N2"), vals[1].ToString("N2"), d.ToString("N0"));
                            sb.AppendLine(s);
                        }

                        for (int ii = 0; ii < WXLEN / XLEN; ++ii)
                        {
                            for (int jj = 0; jj < WYLEN / YLEN; ++jj)
                            {
                                m_pictureBoxBitmap.SetPixel(i * WXLEN / XLEN + ii, j * WYLEN / YLEN + jj, GetValueColor((int)d, false));
                            }
                        }
                    }
                }
            }
            draw_all_points();

            // Marshal log output onto the UI thread.
            this.Invoke(new Action(() =>
            {
                if (m_enableLog)
                {
                    txtLog.AppendText(sb.ToString());
                }
            }));

            if (m_enableEvaluation)
            {
                Test(trainInstances);
            }
        }
예제 #11
0
    //**************************************************************************************

    /// <summary>
    ///	Build classifier model and save it to a file.
    ///	Runs a sequence of resumable stages; each stage is skipped when its result is
    ///	already present in J48Info, and persists its result via WekaJ48Info.UpdateDB:
    ///	profit statistics, model training, crossfold precision, singularity test,
    ///	prediction profits, and child-model creation.
    /// </summary>
    public override void Build(CandlestickCollection iCandlestick)
    {
        // Loaded lazily by whichever stage needs it first; reused by later stages.
        List <int> trainingPoints = null;

        // Calculate average profit and std dev
        if (J48Info.ProfitAverage is null || J48Info.ProfitStdDev is null)
        {
            trainingPoints = LoadTrainingPoints(iCandlestick, ID, ProfitTime);
            float[] profits = FullToTraining(new List <float>(CalculateFutureProfits(iCandlestick[kTrainingPeriod], ProfitTime)), trainingPoints).ToArray();
            J48Info.ProfitStdDev  = Statistics.StandardDeviation(profits);
            // Root models (no parent) use 0 as the profit baseline.
            J48Info.ProfitAverage = J48Info.ParentID is null ? 0.0f : Statistics.ArithmeticMean(profits);
            WekaJ48Info.UpdateDB(J48Info);
        }

        // Build model (skipped when a serialized model file already exists)
        if (!File.Exists(ModelFilename))
        {
            OutputMessage("Building model");

            if (trainingPoints is null)
            {
                trainingPoints = LoadTrainingPoints(iCandlestick, ID, ProfitTime);
            }

            Model = new weka.classifiers.trees.J48();
            Model.buildClassifier(CreateInstances(iCandlestick, trainingPoints, Attributes, Parameters, Period, ProfitTime));
            // Serialize the trained tree so later runs can skip training.
            weka.core.SerializationHelper.write(ModelFilename, Model);
        }

        // Perform crossfold test: 10-fold cross-validation with a fixed seed (reproducible)
        if (J48Info.Precision is null)
        {
            if (Model is null)
            {
                LoadModel();
            }

            OutputMessage("Perfroming crossfold");

            if (trainingPoints is null)
            {
                trainingPoints = LoadTrainingPoints(iCandlestick, ID, ProfitTime);
            }

            var instances  = CreateInstances(iCandlestick, trainingPoints, Attributes, Parameters, Period, ProfitTime);
            var evaluation = new weka.classifiers.Evaluation(instances);
            evaluation.crossValidateModel(Model, instances, 10, new java.util.Random(0));

            J48Info.Precision = (float)evaluation.pctCorrect();

            WekaJ48Info.UpdateDB(J48Info);
        }

        // Perform singular test: a model is "singular" when it funnels (almost) every
        // training point into a single prediction bucket, i.e. it discriminates nothing.
        if (J48Info.IsSingular == null)
        {
            if (Model is null)
            {
                LoadModel();
            }

            OutputMessage("Perfroming singular test");

            // One bucket per possible prediction value.
            var results = new SortedList <Prediction, List <int> >();
            foreach (Prediction p in (Prediction[])Enum.GetValues(typeof(Prediction)))
            {
                results.Add(p, new List <int>());
            }

            if (trainingPoints is null)
            {
                trainingPoints = LoadTrainingPoints(iCandlestick, ID, ProfitTime);
            }

            var parameters = CalculateParameters(Parameters, iCandlestick, trainingPoints, Period);

            for (int k = 0; k < parameters.Count; k++)
            {
                var pred = Predict(parameters[k]);
                results[pred].Add(trainingPoints[k]);
            }

            // Singular = at most one prediction bucket received any points.
            J48Info.IsSingular = results.Count(x => x.Value.Count > 0) <= 1;

            WekaJ48Info.UpdateDB(J48Info);
        }

        // Calculating prediction profits: the average realized profit per prediction class
        if (J48Info.PredictionProfits.Count(x => x != null) == 0)
        {
            if (Model is null)
            {
                LoadModel();
            }

            OutputMessage("Calculating prediction profits");

            if (trainingPoints is null)
            {
                trainingPoints = LoadTrainingPoints(iCandlestick, ID, ProfitTime);
            }

            var predictionPoints = GetHistoricalPredictionPoints(iCandlestick, trainingPoints);

            foreach (Prediction p in (Prediction[])Enum.GetValues(typeof(Prediction)))
            {
                float[] profits = FullToTraining(new List <float>(CalculateFutureProfits(iCandlestick[kTrainingPeriod], ProfitTime)), predictionPoints[p]).ToArray();

                // With fewer than 10 samples the mean is too noisy; fall back to the
                // analytic estimate derived from the profit statistics.
                if (profits.Length < 10)
                {
                    J48Info.PredictionProfits[(int)p] = DecisionToFutureProfit(p, (float)J48Info.ProfitStdDev, (float)J48Info.ProfitAverage);
                }
                else
                {
                    J48Info.PredictionProfits[(int)p] = Statistics.ArithmeticMean(profits);
                }
            }

            WekaJ48Info.UpdateDB(J48Info);
        }

        // Create children: spawn a child model per prediction class with enough data,
        // but only for useful parents (precision > 50% and not singular).
        if (!J48Info.ReproductionComplete.GetValueOrDefault(false))
        {
            // NOTE(review): lock(this) is an anti-pattern (external code can take the
            // same lock) — consider a private lock object.
            lock (this)
            {
                if (J48Info.Precision > 50.0f && !J48Info.IsSingular.GetValueOrDefault(false))
                {
                    OutputMessage("Creating children");

                    if (trainingPoints is null)
                    {
                        trainingPoints = LoadTrainingPoints(iCandlestick, ID, ProfitTime);
                    }

                    var predictionPoints = GetHistoricalPredictionPoints(iCandlestick, trainingPoints);

                    foreach (Prediction p in (Prediction[])Enum.GetValues(typeof(Prediction)))
                    {
                        // Require at least 1000 points and no pre-existing child for this class.
                        if (predictionPoints[p] != null && predictionPoints[p].Count >= 1000 && J48Info.ChildrenID[(int)p] == null)
                        {
                            var child = CreateNew(ParametersID, Parameters, Period, ProfitTime, predictionPoints[p]);

                            // Set parent
                            child.J48Info.ParentID = ID;
                            WekaJ48Info.UpdateDB(child.J48Info);

                            // Update parent info
                            J48Info.ChildrenID[(int)p] = (int)child.ID;
                            WekaJ48Info.UpdateDB(J48Info);
                            childs[(int)p] = child;
                        }
                    }
                }

                J48Info.ReproductionComplete = true;
                WekaJ48Info.UpdateDB(J48Info);
            }
        }
    }
예제 #12
0
파일: TheWeka.cs 프로젝트: icelab-uki/uki
 /// <summary>
 /// Trains the shared classifier on the first split_trainSize instances of the shared data set.
 /// </summary>
 static public void step_train()
 {
     // Training partition: instances [0, split_trainSize) of insts.
     weka.core.Instances trainingSet = new weka.core.Instances(insts, 0, split_trainSize);
     classifier.buildClassifier(trainingSet);
 }
예제 #13
0
        private void button1_Click(object sender, EventArgs e)
        {
            OpenFileDialog file = new OpenFileDialog();

            if (file.ShowDialog() == DialogResult.OK)
            {
                string filename = file.FileName;
                string filee    = Path.GetFileName(filename);
                bool   attributeType;
                string attributeName      = " ";
                int    numAttributeValue  = 0;
                string attributeValueName = " ";

                textBox1.Text = filee + " chosen succesfully!";

                ///////Decision Tree
                weka.core.Instances insts = new weka.core.Instances(new java.io.FileReader(filename));


                insts.setClassIndex(insts.numAttributes() - 1);

                //find nominal or numeric attributes and create dropbox or textbox
                int numofAttributes = insts.numAttributes() - 1;
                for (int i = 0; i < numofAttributes; i++)
                {
                    attributeType = insts.attribute(i).isNumeric();
                    attributeName = insts.attribute(i).name();
                    dataGridView1.Rows.Add(attributeName);
                    if (attributeType == true)
                    {
                    }
                    else
                    {
                        numAttributeValue = insts.attribute(i).numValues();
                        string[] name = new string[numAttributeValue];
                        for (int j = 0; j < numAttributeValue; j++)
                        {
                            attributeValueName = insts.attribute(i).value(j);
                            name[j]           += attributeValueName;
                        }
                        DataGridViewComboBoxCell combo = new DataGridViewComboBoxCell();
                        combo.DataSource = name.ToList();
                        dataGridView1.Rows[i].Cells[1] = combo;
                    }
                }

                cl = new weka.classifiers.trees.J48();

                textBox2.Text = "Performing " + percentSplit + "% split evaluation.";

                // --- Decision tree pipeline (classifier `cl` is created before this excerpt) ---

                // Replace missing attribute values before training.
                weka.filters.Filter missingval = new weka.filters.unsupervised.attribute.ReplaceMissingValues();
                missingval.setInputFormat(insts);
                insts = weka.filters.Filter.useFilter(insts, missingval);

                // Normalize the instances.
                weka.filters.Filter myNormalized = new weka.filters.unsupervised.instance.Normalize();
                myNormalized.setInputFormat(insts);
                insts = weka.filters.Filter.useFilter(insts, myNormalized);


                // Randomize the order of the instances in the dataset so the
                // percent split below is not biased by the file's row order.
                weka.filters.Filter myRandom = new weka.filters.unsupervised.instance.Randomize();
                myRandom.setInputFormat(insts);
                insts = weka.filters.Filter.useFilter(insts, myRandom);

                // Train on the first percentSplit% of instances, test on the rest.
                int trainSize             = insts.numInstances() * percentSplit / 100;
                int testSize              = insts.numInstances() - trainSize;
                weka.core.Instances train = new weka.core.Instances(insts, 0, trainSize);

                cl.buildClassifier(train);

                string str = cl.toString();

                // Count exact matches between predicted and actual class on the held-out tail.
                // NOTE(review): `==` on doubles is exact; it works here only because nominal
                // class predictions are integral indices — confirm the class attribute is nominal.
                int numCorrect = 0;
                for (int i = trainSize; i < insts.numInstances(); i++)
                {
                    weka.core.Instance currentInst    = insts.instance(i);
                    double             predictedClass = cl.classifyInstance(currentInst);
                    if (predictedClass == insts.instance(i).classValue())
                    {
                        numCorrect++;
                    }
                }
                // NOTE(review): divides by testSize with no guard — NaN/Infinity if testSize is 0.
                textBox3.Text = numCorrect + " out of " + testSize + " correct (" +
                                (double)((double)numCorrect / (double)testSize * 100.0) + "%)";



                //////////Naive Bayes

                // Re-read the data file from scratch for this pipeline.
                weka.core.Instances insts2 = new weka.core.Instances(new java.io.FileReader(filename));
                insts2.setClassIndex(insts2.numAttributes() - 1);

                // Naive Bayes classifier (stored in a field so it can be reused elsewhere).
                cl2 = new weka.classifiers.bayes.NaiveBayes();


                // Replace missing attribute values.
                weka.filters.Filter missingval2 = new weka.filters.unsupervised.attribute.ReplaceMissingValues();
                missingval2.setInputFormat(insts2);
                insts2 = weka.filters.Filter.useFilter(insts2, missingval2);

                // Discretize numeric attributes for Naive Bayes.
                weka.filters.Filter discrete2 = new weka.filters.unsupervised.attribute.Discretize();
                discrete2.setInputFormat(insts2);
                insts2 = weka.filters.Filter.useFilter(insts2, discrete2);

                // Randomize the order of the instances in the dataset (common step).
                weka.filters.Filter myRandom2 = new weka.filters.unsupervised.instance.Randomize();
                myRandom2.setInputFormat(insts2);
                insts2 = weka.filters.Filter.useFilter(insts2, myRandom2);

                // Common percent-split train/test evaluation, as above.
                int trainSize2             = insts2.numInstances() * percentSplit / 100;
                int testSize2              = insts2.numInstances() - trainSize2;
                weka.core.Instances train2 = new weka.core.Instances(insts2, 0, trainSize2);

                cl2.buildClassifier(train2);

                string str2 = cl2.toString();

                int numCorrect2 = 0;
                for (int i = trainSize2; i < insts2.numInstances(); i++)
                {
                    weka.core.Instance currentInst2    = insts2.instance(i);
                    double             predictedClass2 = cl2.classifyInstance(currentInst2);
                    if (predictedClass2 == insts2.instance(i).classValue())
                    {
                        numCorrect2++;
                    }
                }
                textBox4.Text = numCorrect2 + " out of " + testSize2 + " correct (" +
                                (double)((double)numCorrect2 / (double)testSize2 * 100.0) + "%)";


                /////////K-Nearest Neighbour

                // Re-read the data file for this pipeline.
                weka.core.Instances insts3 = new weka.core.Instances(new java.io.FileReader(filename));
                insts3.setClassIndex(insts3.numAttributes() - 1);

                cl3 = new weka.classifiers.lazy.IBk();


                // Replace missing attribute values.
                weka.filters.Filter missingval3 = new weka.filters.unsupervised.attribute.ReplaceMissingValues();
                missingval3.setInputFormat(insts3);
                insts3 = weka.filters.Filter.useFilter(insts3, missingval3);

                // Convert nominal attributes to binary (dummy) attributes — needed
                // by distance/weight-based learners (kNN, SVM, neural network).
                weka.filters.Filter dummy3 = new weka.filters.unsupervised.attribute.NominalToBinary();
                dummy3.setInputFormat(insts3);
                insts3 = weka.filters.Filter.useFilter(insts3, dummy3);

                // Normalize numeric attributes.
                weka.filters.Filter myNormalized3 = new weka.filters.unsupervised.instance.Normalize();
                myNormalized3.setInputFormat(insts3);
                insts3 = weka.filters.Filter.useFilter(insts3, myNormalized3);

                // Randomize the order of the instances in the dataset.
                weka.filters.Filter myRandom3 = new weka.filters.unsupervised.instance.Randomize();
                myRandom3.setInputFormat(insts3);
                insts3 = weka.filters.Filter.useFilter(insts3, myRandom3);

                int trainSize3             = insts3.numInstances() * percentSplit / 100;
                int testSize3              = insts3.numInstances() - trainSize3;
                weka.core.Instances train3 = new weka.core.Instances(insts3, 0, trainSize3);

                cl3.buildClassifier(train3);

                string str3 = cl3.toString();

                int numCorrect3 = 0;
                for (int i = trainSize3; i < insts3.numInstances(); i++)
                {
                    weka.core.Instance currentInst3    = insts3.instance(i);
                    double             predictedClass3 = cl3.classifyInstance(currentInst3);
                    if (predictedClass3 == insts3.instance(i).classValue())
                    {
                        numCorrect3++;
                    }
                }
                textBox5.Text = numCorrect3 + " out of " + testSize3 + " correct (" +
                                (double)((double)numCorrect3 / (double)testSize3 * 100.0) + "%)";

                //////////Artificial neural network
                // Re-read the data file for this pipeline.
                weka.core.Instances insts4 = new weka.core.Instances(new java.io.FileReader(filename));
                insts4.setClassIndex(insts4.numAttributes() - 1);

                cl4 = new weka.classifiers.functions.MultilayerPerceptron();


                // Replace missing attribute values.
                weka.filters.Filter missingval4 = new weka.filters.unsupervised.attribute.ReplaceMissingValues();
                missingval4.setInputFormat(insts4);
                insts4 = weka.filters.Filter.useFilter(insts4, missingval4);

                // Convert nominal attributes to binary (dummy) attributes.
                weka.filters.Filter dummy4 = new weka.filters.unsupervised.attribute.NominalToBinary();
                dummy4.setInputFormat(insts4);
                insts4 = weka.filters.Filter.useFilter(insts4, dummy4);

                // Normalize numeric attributes.
                weka.filters.Filter myNormalized4 = new weka.filters.unsupervised.instance.Normalize();
                myNormalized4.setInputFormat(insts4);
                insts4 = weka.filters.Filter.useFilter(insts4, myNormalized4);

                // Randomize the order of the instances in the dataset.
                weka.filters.Filter myRandom4 = new weka.filters.unsupervised.instance.Randomize();
                myRandom4.setInputFormat(insts4);
                insts4 = weka.filters.Filter.useFilter(insts4, myRandom4);

                int trainSize4             = insts4.numInstances() * percentSplit / 100;
                int testSize4              = insts4.numInstances() - trainSize4;
                weka.core.Instances train4 = new weka.core.Instances(insts4, 0, trainSize4);

                cl4.buildClassifier(train4);

                string str4 = cl4.toString();

                int numCorrect4 = 0;
                for (int i = trainSize4; i < insts4.numInstances(); i++)
                {
                    weka.core.Instance currentInst4    = insts4.instance(i);
                    double             predictedClass4 = cl4.classifyInstance(currentInst4);
                    if (predictedClass4 == insts4.instance(i).classValue())
                    {
                        numCorrect4++;
                    }
                }

                textBox6.Text = numCorrect4 + " out of " + testSize4 + " correct (" +
                                (double)((double)numCorrect4 / (double)testSize4 * 100.0) + "%)";



                ///////Support Vector Machine
                // Re-read the data file for this pipeline.
                weka.core.Instances insts5 = new weka.core.Instances(new java.io.FileReader(filename));
                insts5.setClassIndex(insts5.numAttributes() - 1);

                cl5 = new weka.classifiers.functions.SMO();


                // Replace missing attribute values.
                weka.filters.Filter missingval5 = new weka.filters.unsupervised.attribute.ReplaceMissingValues();
                missingval5.setInputFormat(insts5);
                insts5 = weka.filters.Filter.useFilter(insts5, missingval5);

                // Convert nominal attributes to binary (dummy) attributes.
                weka.filters.Filter dummy5 = new weka.filters.unsupervised.attribute.NominalToBinary();
                dummy5.setInputFormat(insts5);
                insts5 = weka.filters.Filter.useFilter(insts5, dummy5);

                // Normalize numeric attributes.
                weka.filters.Filter myNormalized5 = new weka.filters.unsupervised.instance.Normalize();
                myNormalized5.setInputFormat(insts5);
                insts5 = weka.filters.Filter.useFilter(insts5, myNormalized5);

                // Randomize the order of the instances in the dataset.
                weka.filters.Filter myRandom5 = new weka.filters.unsupervised.instance.Randomize();
                myRandom5.setInputFormat(insts5);
                insts5 = weka.filters.Filter.useFilter(insts5, myRandom5);

                int trainSize5             = insts5.numInstances() * percentSplit / 100;
                int testSize5              = insts5.numInstances() - trainSize5;
                weka.core.Instances train5 = new weka.core.Instances(insts5, 0, trainSize5);

                cl5.buildClassifier(train5);

                string str5 = cl5.toString();

                int numCorrect5 = 0;
                for (int i = trainSize5; i < insts5.numInstances(); i++)
                {
                    weka.core.Instance currentInst5    = insts5.instance(i);
                    double             predictedClass5 = cl5.classifyInstance(currentInst5);
                    if (predictedClass5 == insts5.instance(i).classValue())
                    {
                        numCorrect5++;
                    }
                }

                textBox7.Text = numCorrect5 + " out of " + testSize5 + " correct (" +
                                (double)((double)numCorrect5 / (double)testSize5 * 100.0) + "%)";



                // Recover each accuracy by re-parsing the "(NN.N%)" suffix out of the
                // UI text written above.
                // NOTE(review): fragile — parsing presentation strings instead of keeping
                // the numeric results; also culture-sensitive via Convert.ToDouble.
                string result1 = textBox3.Text;
                string output1 = result1.Split('(', ')')[1];
                output1 = output1.Remove(output1.Length - 1);
                double r1 = Convert.ToDouble(output1);

                string result2 = textBox4.Text;
                string output2 = result2.Split('(', ')')[1];
                output2 = output2.Remove(output2.Length - 1);
                double r2 = Convert.ToDouble(output2);

                string result3 = textBox5.Text;
                string output3 = result3.Split('(', ')')[1];
                output3 = output3.Remove(output3.Length - 1);
                double r3 = Convert.ToDouble(output3);

                string result4 = textBox6.Text;
                string output4 = result4.Split('(', ')')[1];
                output4 = output4.Remove(output4.Length - 1);
                double r4 = Convert.ToDouble(output4);

                string result5 = textBox7.Text;
                string output5 = result5.Split('(', ')')[1];
                output5 = output5.Remove(output5.Length - 1);
                double r5 = Convert.ToDouble(output5);


                // Report whichever classifier scored the highest accuracy.
                // Ties are resolved in favor of the earlier algorithm in this chain.
                // NOTE(review): "Algoritm" is misspelled in the UI strings below.
                double[] max_array = new double[] { r1, r2, r3, r4, r5 };

                double max = max_array.Max();
                if (r1 == max)
                {
                    textBox8.Text = "Best Algoritm is Decision Tree Algorithm ";
                }
                else if (r2 == max)
                {
                    textBox8.Text = "Best Algoritm is Naive Bayes Algorithm ";
                }
                else if (r3 == max)
                {
                    textBox8.Text = "Best Algoritm is K-Nearest Neighbour Algorithm ";
                }
                else if (r4 == max)
                {
                    textBox8.Text = "Best Algoritm is Artificial Neural Network Algorithm ";
                }
                else if (r5 == max)
                {
                    textBox8.Text = "Best Algoritm is Support Vector Machine Algorithm ";
                }
            }
        }
예제 #14
0
        /// <summary>
        /// Trains five classifiers (Naive Bayes, k-NN, J48 decision tree, multilayer
        /// perceptron, SMO/SVM) on the data set at <c>fileDirectory</c> using the same
        /// percent split, then reports the most accurate one in <c>textBox1</c>.
        /// Also fills <c>dataGridView1</c> with one row per input attribute so a new
        /// instance can be entered (nominal attributes get a combo box of their values).
        /// Side effects: assigns/trains the fields cl_Naive, cl_Knn, cl_Tree, cl_NN,
        /// cl_SVM and writes the fields max (best accuracy) and count (1-based index
        /// of the best algorithm).
        /// </summary>
        private void result_Click(object sender, EventArgs e)
        {
            // Algorithm names, index-aligned with the accuracies collected below.
            ArrayList algorithms = new ArrayList();
            algorithms.Add("Naive Bayes");
            algorithms.Add("K Nearest Neighbor");
            algorithms.Add("Decision Tree");
            algorithms.Add("Neural Network");
            algorithms.Add("Support Vector Machine");
            ArrayList successPercent = new ArrayList();
            string nameOfAlgo = "";

            // ---------- Naive Bayes ----------
            weka.core.Instances insts = new weka.core.Instances(new java.io.FileReader(fileDirectory));

            // Build the dynamic grid used to enter a new instance:
            // column 0 = attribute name, column 1 = value editor.
            dataGridView1.ColumnCount = 2;
            dataGridView1.RowCount    = insts.numAttributes();

            for (int y = 0; y < insts.numAttributes() - 1; y++)
            {
                dataGridView1.Rows[y].Cells[0].Value = insts.attribute(y).name();
                if (insts.attribute(y).isNominal())
                {
                    // attribute.toString() looks like "@attribute name {a,b,c}";
                    // pull the comma-separated value list out of the braces.
                    string   phrase = insts.attribute(y).toString();
                    string[] first  = phrase.Split('{');

                    string[] second = first[1].Split('}');

                    string[] attributeValues = second[0].Split(',');

                    DataGridViewComboBoxCell comboColumn = new DataGridViewComboBoxCell();

                    foreach (var a in attributeValues)
                    {
                        comboColumn.Items.Add(a);
                    }
                    dataGridView1.Rows[y].Cells[1] = comboColumn;
                }
            }

            insts.setClassIndex(insts.numAttributes() - 1);
            cl_Naive = new weka.classifiers.bayes.NaiveBayes();

            // Naive Bayes: discretize numeric attributes.
            insts = ApplyFilter(insts, new weka.filters.unsupervised.attribute.Discretize());
            successPercent.Add(EvaluatePercentSplit(cl_Naive, insts));

            // ---------- k-Nearest Neighbour ----------
            weka.core.Instances insts2 = new weka.core.Instances(new java.io.FileReader(fileDirectory));
            insts2.setClassIndex(insts2.numAttributes() - 1);
            cl_Knn = new weka.classifiers.lazy.IBk();

            // Distance-based learner: binarize nominal attributes, then normalize.
            insts2 = ApplyFilter(insts2, new weka.filters.unsupervised.attribute.NominalToBinary());
            insts2 = ApplyFilter(insts2, new weka.filters.unsupervised.instance.Normalize());
            successPercent.Add(EvaluatePercentSplit(cl_Knn, insts2));

            // ---------- Decision tree (J48) ----------
            weka.core.Instances insts3 = new weka.core.Instances(new java.io.FileReader(fileDirectory));
            insts3.setClassIndex(insts3.numAttributes() - 1);
            cl_Tree = new weka.classifiers.trees.J48();

            insts3 = ApplyFilter(insts3, new weka.filters.unsupervised.instance.Normalize());
            successPercent.Add(EvaluatePercentSplit(cl_Tree, insts3));

            // ---------- Neural network (multilayer perceptron) ----------
            weka.core.Instances insts4 = new weka.core.Instances(new java.io.FileReader(fileDirectory));
            insts4.setClassIndex(insts4.numAttributes() - 1);
            cl_NN = new weka.classifiers.functions.MultilayerPerceptron();

            insts4 = ApplyFilter(insts4, new weka.filters.unsupervised.attribute.NominalToBinary());
            insts4 = ApplyFilter(insts4, new weka.filters.unsupervised.instance.Normalize());
            successPercent.Add(EvaluatePercentSplit(cl_NN, insts4));

            // ---------- Support vector machine (SMO) ----------
            weka.core.Instances insts5 = new weka.core.Instances(new java.io.FileReader(fileDirectory));
            insts5.setClassIndex(insts5.numAttributes() - 1);
            cl_SVM = new weka.classifiers.functions.SMO();

            insts5 = ApplyFilter(insts5, new weka.filters.unsupervised.attribute.NominalToBinary());
            insts5 = ApplyFilter(insts5, new weka.filters.unsupervised.instance.Normalize());
            successPercent.Add(EvaluatePercentSplit(cl_SVM, insts5));

            // Pick the highest accuracy. Reset the shared max/count fields first so a
            // repeated click does not compare against the stale maximum of a previous
            // run (bug fix: previously a second click could report the wrong winner).
            max   = 0;
            count = 0;
            for (int i = 0; i < successPercent.Count; i++)
            {
                if ((double)successPercent[i] > max)
                {
                    max   = (double)successPercent[i];
                    count = i + 1;   // 1-based index of the current best algorithm
                }
            }
            if (count > 0)
            {
                nameOfAlgo = (string)algorithms[count - 1];
            }

            textBox1.Text = nameOfAlgo + " is the most successful algorithm for this data set." + "(" + max + "%)\n";
        }

        /// <summary>Applies a Weka filter to a data set and returns the filtered copy.</summary>
        private static weka.core.Instances ApplyFilter(weka.core.Instances insts, weka.filters.Filter filter)
        {
            filter.setInputFormat(insts);
            return weka.filters.Filter.useFilter(insts, filter);
        }

        /// <summary>
        /// Shuffles the data, trains <paramref name="cl"/> on the first
        /// <c>percentSplit</c> percent of instances and returns the classification
        /// accuracy (0..100) on the remaining instances.
        /// </summary>
        private double EvaluatePercentSplit(weka.classifiers.Classifier cl, weka.core.Instances insts)
        {
            // Randomize instance order so the split is not biased by file order.
            insts = ApplyFilter(insts, new weka.filters.unsupervised.instance.Randomize());

            int trainSize = insts.numInstances() * percentSplit / 100;
            int testSize  = insts.numInstances() - trainSize;

            weka.core.Instances train = new weka.core.Instances(insts, 0, trainSize);

            cl.buildClassifier(train);

            int numCorrect = 0;
            for (int i = trainSize; i < insts.numInstances(); i++)
            {
                weka.core.Instance currentInst = insts.instance(i);
                // Exact double comparison is safe here only for nominal class values,
                // whose predictions are integral label indices.
                if (cl.classifyInstance(currentInst) == currentInst.classValue())
                {
                    numCorrect++;
                }
            }
            // NOTE(review): no guard for testSize == 0 (kept from original behavior).
            return (double)numCorrect / (double)testSize * 100.0;
        }