Example 1
    public static void Main(String[] args)
    {
        try
        {
            // Load the model
            java.io.ObjectInputStream   stream       = new java.io.ObjectInputStream(new java.io.FileInputStream("iris_j48.model"));
            weka.classifiers.Classifier qhClassifier = (weka.classifiers.Classifier)stream.readObject();
            stream.close();

            // This model was trained on 66% of the instances from the iris dataset. Test it on the remaining 34%.
            weka.core.Instances insts = new weka.core.Instances(new java.io.FileReader("iris.arff"));
            insts.setClassIndex(insts.numAttributes() - 1);
            int percentSplit = 66;
            int trainSize    = insts.numInstances() * percentSplit / 100;
            int testSize     = insts.numInstances() - trainSize;
            int numCorrect   = 0;
            for (int i = trainSize; i < insts.numInstances(); i++)
            {
                weka.core.Instance currentInst    = insts.instance(i);
                double             predictedClass = qhClassifier.classifyInstance(currentInst);
                if (predictedClass == insts.instance(i).classValue())
                {
                    numCorrect++;
                }
            }
            Console.WriteLine(numCorrect + " out of " + testSize + " correct (" + (double)((double)numCorrect / (double)testSize * 100.0) + "%)");
        }
        catch (java.lang.Exception e)
        {
            e.printStackTrace();
        }
    }
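A note on Example 1: it only loads a previously serialized model. Below is a minimal, hypothetical companion sketch showing how such an "iris_j48.model" file could be produced with the same IKVM-compiled Weka types; the J48 classifier and the 66% split mirror the comment in the example, everything else is an assumption.

    public static void TrainAndSaveModel()
    {
        try
        {
            // Load the full iris dataset and mark the last attribute as the class.
            weka.core.Instances insts = new weka.core.Instances(new java.io.FileReader("iris.arff"));
            insts.setClassIndex(insts.numAttributes() - 1);

            // Train a J48 tree on the first 66% of the instances.
            int trainSize = insts.numInstances() * 66 / 100;
            weka.core.Instances train = new weka.core.Instances(insts, 0, trainSize);

            weka.classifiers.Classifier cls = new weka.classifiers.trees.J48();
            cls.buildClassifier(train);

            // Serialize the trained model so Example 1 can read it back with ObjectInputStream.
            java.io.ObjectOutputStream stream = new java.io.ObjectOutputStream(new java.io.FileOutputStream("iris_j48.model"));
            stream.writeObject(cls);
            stream.flush();
            stream.close();
        }
        catch (java.lang.Exception e)
        {
            e.printStackTrace();
        }
    }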
Example 2
        /// <summary>
        /// Creates a classifier of the desired type from an .arff file
        /// </summary>
        /// <param name="ARFFfile">The ARFF file to read from. Should be a full path.</param>
        /// <param name="myClassifier">The type of classifier you want to build.</param>
        /// <remarks>The trained classifier is stored in the _classifier field.</remarks>
        public void createModel(string ARFFfile, Classifier myClassifier)
        {
            if (debug)
            {
                Console.WriteLine("Loading ARFF file " + ARFFfile);
            }

            _classifier = GetClassifier(myClassifier);
            try
            {
                _dataSet = new weka.core.Instances(new java.io.FileReader(ARFFfile));
                if (debug)
                {
                    Console.WriteLine("You have " + _dataSet.numAttributes() + " attributes.");
                }
                _dataSet.setClassIndex(_dataSet.numAttributes() - 1);

                _classifier.buildClassifier(_dataSet);

                if (debug)
                {
                    Console.WriteLine(_classifier.toString());
                }
            }
            catch (Exception e)
            {
                Console.WriteLine("You failed. End of Game. Poor Weka.");
                Console.WriteLine(e);
            }
        }
Example 3
        public WekaClassifier CreateWekaClassifier()
        {
            _aco1.Initialize();
            _aco1.Work();
            //_aco1.PostProcessing();

            Dataset reduced = null;

            if (_attributeFirst)
            {
                reduced = _trainingSet.ReduceAttributes(ACO1.BestAnt.Solution.AttributesToRemove());
            }
            else
            {
                reduced = _trainingSet.ReduceInstances(ACO1.BestAnt.Solution.InstancesToRemove());
            }


            _aco2.Initialize(_aco1.BestAnt.Solution);
            _aco2.Work();
            //_aco2.PostProcessing();



            this._bestSolution = ACO2.BestAnt.Solution;
            weka.classifiers.Classifier classifier = ((WekaClassificationQualityEvaluator)_aco1.Problem.SolutionQualityEvaluator).CreateClassifier(this._bestSolution);

            WekaClassifier wekaClassifier = new WekaClassifier();

            wekaClassifier.Classifier         = classifier;
            wekaClassifier.AttributesToRemove = this._bestSolution.AttributesToRemove();

            return(wekaClassifier);
        }
Example 4
        public List <string> Classify(string model, string test)
        {
            List <string> ret = new List <string>();

            try
            {
                java.io.ObjectInputStream   ois = new java.io.ObjectInputStream(new java.io.FileInputStream(model));
                weka.classifiers.Classifier cl  = (weka.classifiers.Classifier)ois.readObject();
                ois.close();

                weka.core.Instances insts = new weka.core.Instances(new java.io.FileReader(test));
                insts.setClassIndex(insts.numAttributes() - 1);
                for (int i = 0; i < 1; i++)
                {
                    weka.core.Instance currentInst    = insts.instance(i);
                    double             predictedClass = cl.classifyInstance(currentInst);
                    double[]           distrs         = cl.distributionForInstance(currentInst);
                    //string actual = insts.classAttribute().value((int)currentInst.classValue());
                    //string predicted = insts.classAttribute().value((int)predictedClass);
                    // System.Console.WriteLine("ID: " + (i + 1) + ", " + predicted);
                    for (int j = 0; j < distrs.Length; j++)
                    {
                        string predicted = insts.classAttribute().value(j);
                        string distr     = distrs[j].ToString("#0.000");
                        ret.Add(predicted + "," + distr);
                    }
                }
                return(ret);
            }
            catch
            {
                return(ret);
            }
        }
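A hedged usage sketch for the Classify method above: the loop only scores the first instance of the test file, so the returned list holds one "<label>,<probability>" entry per class. The file paths and the wrapper instance are placeholders.

    // Hypothetical call site, assuming an instance of the class that defines Classify.
    List<string> distribution = wekaWrapper.Classify("iris_j48.model", "iris_test.arff");
    foreach (string entry in distribution)
    {
        // Each entry was built as predicted label + "," + probability.
        Console.WriteLine(entry);
    }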
Example 5
    //**************************************************************************************

    protected override void LoadModel()
    {
        lock (this)
        {
            if (Model is null)
            {
                Model = (weka.classifiers.Classifier)weka.core.SerializationHelper.read(ModelFilename);
            }
        }
    }
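LoadModel above reads the classifier with weka.core.SerializationHelper. A hedged counterpart that writes it back with the same helper (everything except Model and ModelFilename is an assumption) could look like this:

    protected void SaveModel()
    {
        lock (this)
        {
            if (Model != null)
            {
                // Mirror of LoadModel: persist the classifier to ModelFilename.
                weka.core.SerializationHelper.write(ModelFilename, Model);
            }
        }
    }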
        public WekaNETBridge.WekaClassifier CreateWekaClassifier(weka.classifiers.Classifier currentClassifier, Solution <DRComponent> solution)
        {
            weka.core.Instances         reducedDataset = WekaNETBridge.WekaClassification.GetReducedDataset(this.WekaClassification.OriginalDataset, solution.AttributesToRemove(), solution.InstancesToRemove());
            weka.classifiers.Classifier classifier     = this.WekaClassification.CreateClassifier(reducedDataset, currentClassifier);

            WekaNETBridge.WekaClassifier wekaClassifier = new WekaNETBridge.WekaClassifier();
            wekaClassifier.Classifier         = classifier;
            wekaClassifier.AttributesToRemove = solution.AttributesToRemove();
            return(wekaClassifier);
        }
    public static double Classify(this weka.classifiers.Classifier classifier, params object[] example)
    {
        // instance length is example.Length + 1, because the class attribute is not included in the example
        var instance = new Instance(example.Length + 1);

        for (int i = 0; i < example.Length; i++)
        {
            instance.setValue(i, Convert.ToDouble(example[i]));
        }

        return(classifier.classifyInstance(instance));
    }
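One caveat about the extension method above: the new weka.core.Instance is never attached to a dataset header, and most Weka classifiers need that header to resolve attribute and class metadata. A hedged variant that takes the header explicitly (assuming numeric input attributes) is sketched below.

    public static double Classify(this weka.classifiers.Classifier classifier,
                                   weka.core.Instances header, params object[] example)
    {
        // Bind the instance to a dataset header so classifyInstance can look up
        // attribute types and the class attribute.
        var instance = new weka.core.Instance(header.numAttributes());
        instance.setDataset(header);

        for (int i = 0; i < example.Length; i++)
        {
            instance.setValue(i, Convert.ToDouble(example[i]));
        }

        return classifier.classifyInstance(instance);
    }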
Example 8
        public WekaClassifier CreateWekaClassifier()
        {
            this.Work();
            weka.classifiers.Classifier classifier = ((WekaClassificationQualityEvaluator)this._problem.SolutionQualityEvaluator).CreateClassifier(this.BestAnt.Solution);

            WekaClassifier wekaClassifier = new WekaClassifier();

            wekaClassifier.Classifier         = classifier;
            wekaClassifier.AttributesToRemove = this.BestAnt.Solution.AttributesToRemove();

            return(wekaClassifier);
        }
Example 9
        public void evaluateModel(weka.classifiers.Classifier classifier, weka.core.Instances instances)
        {
            double[] v = WekaUtils.ClassifyInstances(instances, classifier);

            //weka.classifiers.Evaluation eval = new weka.classifiers.Evaluation(instances, m_costMatrix);
            //v = eval.evaluateModel(classifier, instances);
            evaluateModel(v, instances);

            //int n = 0;
            //for (int i = 0; i < v.Length; ++i)
            //    if (v[i] != 0)
            //        n++;
        }
        public static double SupportVectorMachineTest(weka.core.Instances insts)
        {
            try
            {
                //weka.core.Instances insts = new weka.core.Instances(new java.io.FileReader("iris.arff"));

                insts.setClassIndex(insts.numAttributes() - 1);


                SupportVectorMachine = new weka.classifiers.functions.SMO();

                weka.filters.Filter myDummy = new weka.filters.unsupervised.attribute.NominalToBinary();

                myDummy.setInputFormat(insts);
                insts = weka.filters.Filter.useFilter(insts, myDummy);


                weka.filters.Filter myNormalize = new weka.filters.unsupervised.instance.Normalize();
                myNormalize.setInputFormat(insts);
                insts = weka.filters.Filter.useFilter(insts, myNormalize);

                weka.filters.Filter myRandom = new weka.filters.unsupervised.instance.Randomize();
                myRandom.setInputFormat(insts);
                insts = weka.filters.Filter.useFilter(insts, myRandom);

                int trainSize             = insts.numInstances() * percentSplit / 100;
                int testSize              = insts.numInstances() - trainSize;
                weka.core.Instances train = new weka.core.Instances(insts, 0, trainSize);


                SupportVectorMachine.buildClassifier(train);


                int numCorrect = 0;
                for (int i = trainSize; i < insts.numInstances(); i++)
                {
                    weka.core.Instance currentInst    = insts.instance(i);
                    double             predictedClass = SupportVectorMachine.classifyInstance(currentInst);
                    if (predictedClass == insts.instance(i).classValue())
                    {
                        numCorrect++;
                    }
                }
                return((double)numCorrect / (double)testSize * 100.0);
            }
            catch (java.lang.Exception ex)
            {
                ex.printStackTrace();
                return(0);
            }
        }
Example 11
        /// <summary>
        /// Loads a classifier from a model file.
        /// </summary>
        /// <param name="filename">The base filename (full path, without extension); a matching .model and .arff file are expected in the working directory.</param>
        public void loadModel(string filename)
        {
            if (debug)
            {
                Console.WriteLine("Model loading...");
            }
            _classifier = (weka.classifiers.Classifier)weka.core.SerializationHelper.read(filename + MODEL);
            _dataSet    = new weka.core.Instances(new java.io.FileReader(filename + ARFF));
            _dataSet.setClassIndex(_dataSet.numAttributes() - 1);

            if (debug)
            {
                Console.WriteLine("Model locked and loaded!");
            }
        }
        public static double NaiveBayesTest(weka.core.Instances insts)
        {
            try
            {
                //weka.core.Instances insts = new weka.core.Instances(new java.io.FileReader("iris.arff"));

                insts.setClassIndex(insts.numAttributes() - 1);


                NaiveBayescl = new weka.classifiers.bayes.NaiveBayes();


                //discretize
                weka.filters.Filter myDiscretize = new weka.filters.unsupervised.attribute.Discretize();
                myDiscretize.setInputFormat(insts);
                insts = weka.filters.Filter.useFilter(insts, myDiscretize);

                weka.filters.Filter myRandom = new weka.filters.unsupervised.instance.Randomize();
                myRandom.setInputFormat(insts);
                insts = weka.filters.Filter.useFilter(insts, myRandom);

                int trainSize             = insts.numInstances() * percentSplit / 100;
                int testSize              = insts.numInstances() - trainSize;
                weka.core.Instances train = new weka.core.Instances(insts, 0, trainSize);

                NaiveBayescl.buildClassifier(train);


                int numCorrect = 0;
                for (int i = trainSize; i < insts.numInstances(); i++)
                {
                    weka.core.Instance currentInst    = insts.instance(i);
                    double             predictedClass = NaiveBayescl.classifyInstance(currentInst);
                    if (predictedClass == insts.instance(i).classValue())
                    {
                        numCorrect++;
                    }
                }
                return((double)numCorrect / (double)testSize * 100.0);
            }
            catch (java.lang.Exception ex)
            {
                ex.printStackTrace();
                return(0);
            }
        }
    protected void Button2_Click(object sender, EventArgs e)
    {
        weka.core.Instances data = new weka.core.Instances(new java.io.FileReader("d:\\train.arff"));
        data.setClassIndex(data.numAttributes() - 1);
        weka.classifiers.Classifier cls = new weka.classifiers.bayes.NaiveBayes();
        // weka.classifiers.functions.supportVector.SMOset();
        int runs  = 1;
        int folds = 10;

        //string sq = "delete from nbresults";
        //dbc.execfn(sq);
        // perform cross-validation
        for (int i = 0; i < runs; i++)
        {
            // randomize data
            int seed = i + 1;
            java.util.Random    rand     = new java.util.Random(seed);
            weka.core.Instances randData = new weka.core.Instances(data);
            randData.randomize(rand);
            if (randData.classAttribute().isNominal())
            {
                randData.stratify(folds);
            }
            // weka.classifiers.trees.j48 jj;
            weka.classifiers.Evaluation eval = new weka.classifiers.Evaluation(randData);
            for (int n = 0; n < folds; n++)
            {
                weka.core.Instances train = randData.trainCV(folds, n);
                weka.core.Instances test  = randData.testCV(folds, n);
                // build and evaluate classifier
                weka.classifiers.Classifier clsCopy = weka.classifiers.Classifier.makeCopy(cls);
                clsCopy.buildClassifier(train);

                eval.evaluateModel(clsCopy, test);
            }

            preci_value.Text  = eval.precision(0).ToString();
            recall_value.Text = eval.recall(0).ToString();
            acc_value.Text    = eval.fMeasure(0).ToString();

            string s = "NB";
            //    string str = "insert into evaluation values('" + instid.Text + "','" + courid.Text.ToString() + "','" + preci_value.Text.ToString() + "','" + recall_value.Text.ToString() + "','" + acc_value.Text.ToString() + "','" + s + "' )";
            //  db.execfn(str);
            //  MessageBox.Show("saved");
        }
    }
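If more than precision, recall and F-measure are needed, the Evaluation object built inside the run loop above can also report Weka's standard summary. A small, hedged addition (placed right after the fold loop, while eval is still in scope):

    // Hypothetical follow-up: print the overall summary and the confusion matrix.
    Console.WriteLine(eval.toSummaryString("=== " + folds + "-fold cross-validation, run " + (i + 1) + " ===", false));
    Console.WriteLine(eval.toMatrixString());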
Example 14
        public void Test2()
        {
            java.io.ObjectInputStream   ois = new java.io.ObjectInputStream(new java.io.FileInputStream("D:\\android_analysis\\som_model.model"));
            weka.classifiers.Classifier cl  = (weka.classifiers.Classifier)ois.readObject();
            ois.close();

            weka.core.Instances insts = new weka.core.Instances(new java.io.FileReader("D:\\android_analysis\\test1.arff"));
            insts.setClassIndex(insts.numAttributes() - 1);
            for (int i = 0; i < insts.numInstances(); i++)
            {
                weka.core.Instance currentInst    = insts.instance(i);
                double             predictedClass = cl.classifyInstance(currentInst);
                double[]           distrs         = cl.distributionForInstance(currentInst);
                //string actual = insts.classAttribute().value((int)currentInst.classValue());
                //string predicted = insts.classAttribute().value((int)predictedClass);
                // System.Console.WriteLine("ID: " + (i + 1) + ", " + predicted);
            }
        }
Example 15
        //Artificial NN
        public static double ArtificialNN(weka.core.Instances insts)
        {
            try
            {
                insts.setClassIndex(insts.numAttributes() - 1);

                Anncl = new weka.classifiers.functions.MultilayerPerceptron();

                weka.filters.Filter myDummy = new weka.filters.unsupervised.attribute.NominalToBinary();
                myDummy.setInputFormat(insts);
                insts = weka.filters.Filter.useFilter(insts, myDummy);

                weka.filters.Filter myNormalize = new weka.filters.unsupervised.instance.Normalize();
                myNormalize.setInputFormat(insts);
                insts = weka.filters.Filter.useFilter(insts, myNormalize);

                weka.filters.Filter myRandom = new weka.filters.unsupervised.instance.Randomize();
                myRandom.setInputFormat(insts);
                insts = weka.filters.Filter.useFilter(insts, myRandom);

                int trainSize             = insts.numInstances() * percentSplit / 100;
                int testSize              = insts.numInstances() - trainSize;
                weka.core.Instances train = new weka.core.Instances(insts, 0, trainSize);

                Anncl.buildClassifier(train);

                int numCorrect = 0;
                for (int i = trainSize; i < insts.numInstances(); i++)
                {
                    weka.core.Instance currentInst    = insts.instance(i);
                    double             predictedClass = Anncl.classifyInstance(currentInst);
                    if (predictedClass == insts.instance(i).classValue())
                    {
                        numCorrect++;
                    }
                }
                return((double)numCorrect / (double)testSize * 100.0);
            }
            catch (java.lang.Exception ex)
            {
                ex.printStackTrace();
                return(0);
            }
        }
Example 16
 //0 = neural network, 2 = naive Bayes, any other value = decision tree
 static public weka.classifiers.Classifier step_buildClassifier(int algo)
 {
     weka.classifiers.Classifier classifier = null;
     if (algo == 0)
     {
         classifier_NN.setLearningRate(0.3);
         classifier_NN.setMomentum(0.2);
         classifier_NN.setHiddenLayers("a");
         classifier = classifier_NN;
     }
     else if (algo == 2)
     {
         classifier = classifier_NBay;
     }
     else
     {
         classifier = classifier_tree;
     }
     return(classifier);
 }
        public void EvaluateSolutionQuality(Solution <DRComponent> solution)
        {
            double quality = 0;

            weka.core.Instances         validationSet = this.WekaClassification.OriginalDataset;
            weka.classifiers.Classifier classifier    = this.CreateClassifier(solution);

            if (!this._skipAttibutesValidation)
            {
                int[] attributesToRemove = solution.AttributesToRemove();
                if (attributesToRemove.Length != 0)
                {
                    validationSet = WekaNETBridge.WekaClassification.GetReducedDataset(this.WekaClassification.OriginalDataset, attributesToRemove, null);
                }
            }

            quality = WekaNETBridge.WekaClassification.EvaluateClassifier(classifier, validationSet);

            solution.Quality = quality;
        }
    /* Use when the player logs in to initially create the classifier with data from server */
    public void InitializeClassifier(String dataString)
    {
        try {
            java.io.StringReader   stringReader = new java.io.StringReader(dataString);
            java.io.BufferedReader buffReader   = new java.io.BufferedReader(stringReader);

            playerData = new weka.core.Instances(buffReader);

            /* State where in each Instance the class attribute is, if it's not already specified by the file */
            if (playerData.classIndex() == -1)
            {
                playerData.setClassIndex(playerData.numAttributes() - 1);
            }

            /* NAIVE BAYES */
            //classifier = new weka.classifiers.bayes.NaiveBayes();

            /* NEURAL NET */
            //classifier = new weka.classifiers.functions.MultilayerPerceptron();
            //((weka.classifiers.functions.MultilayerPerceptron)classifier).setHiddenLayers("12");

            /* J48 TREE */
            //classifier = new weka.classifiers.trees.J48();

            /* IB1 NEAREST NEIGHBOUR */
            //classifier = new weka.classifiers.lazy.IB1();

            /* RANDOM FOREST */
            classifier = new weka.classifiers.trees.RandomForest();


            classifier.buildClassifier(playerData);
            Debug.Log("Initialized Classifier");
        }
        catch (java.lang.Exception ex)
        {
            Debug.LogError(ex.getMessage());
        }
    }
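Once the RandomForest above has been built, a new observation can be scored against the same playerData header. A minimal sketch, assuming purely numeric input attributes; the method name and the feature array are hypothetical.

    public string ClassifyPlayer(double[] features)
    {
        try {
            // Build an instance with the same attribute layout as the training data.
            weka.core.Instance inst = new weka.core.Instance(playerData.numAttributes());
            inst.setDataset(playerData);

            for (int i = 0; i < features.Length; i++)
            {
                inst.setValue(i, features[i]);
            }

            double predicted = classifier.classifyInstance(inst);

            // Map the numeric class index back to its nominal label.
            return playerData.classAttribute().value((int)predicted);
        }
        catch (java.lang.Exception ex)
        {
            Debug.LogError(ex.getMessage());
            return null;
        }
    }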
Example 19
        private void testButton_Click(object sender, EventArgs e)
        {
            var form = Form.ActiveForm as Form1;

            if (readyToTest)
            {
                weka.classifiers.Classifier cl   = classifiers[highestSuccessRate.Key];
                weka.core.Instance          inst = new weka.core.Instance(insts.numAttributes() - 1);
                inst.setDataset(insts);
                for (int i = 0; i < inputObjects.Count; i++)
                {
                    if (inputObjects[i].numeric)
                    {
                        inst.setValue(i, Decimal.ToDouble(inputObjects[i].num.Value));
                    }
                    else
                    {
                        inst.setValue(i, inputObjects[i].nom.SelectedItem.ToString());
                    }
                }


                try
                {
                    string[] values      = insts.attribute(insts.numAttributes() - 1).toString().Split('{', '}')[1].Split(',');
                    double   classOfData = cl.classifyInstance(inst);
                    int      idx         = Convert.ToInt32(classOfData);
                    form.testResult.Text = values[idx];
                }
                catch (Exception ex)
                {
                    MessageBox.Show(ex.Message, "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
                }
            }
            else
            {
                MessageBox.Show("Program is not ready to test, probably needs to process data first.", "Not ready", MessageBoxButtons.OK, MessageBoxIcon.Information);
            }
        }
Example 20
 public static void do_Classification_full(Boolean loadData)
 {
     try
     {
         if (loadData == true)
         {
             step_loadInstance();
         }
         insts.setClassIndex(colClass);//Which Column is classified
         if (random_sort == true)
         {
             step_randomInstanceOrder();
         }
         //------ Train & Test ----------
         classifier = step_buildClassifier(classify_model);
         txtResult_Reset();
         step_train_test();
     }
     catch (Exception ex)
     {
         TheSys.showError("Err: " + ex.ToString(), true);
     }
 }
Example 21
        //Random Forest
        public static double RandomForestTest(weka.core.Instances insts)
        {
            try
            {
                insts.setClassIndex(insts.numAttributes() - 1);

                RandomForestcl = new weka.classifiers.trees.RandomForest();

                weka.filters.Filter myRandom = new weka.filters.unsupervised.instance.Randomize();
                myRandom.setInputFormat(insts);
                insts = weka.filters.Filter.useFilter(insts, myRandom);

                int trainSize             = insts.numInstances() * percentSplit / 100;
                int testSize              = insts.numInstances() - trainSize;
                weka.core.Instances train = new weka.core.Instances(insts, 0, trainSize);

                RandomForestcl.buildClassifier(train);


                int numCorrect = 0;
                for (int i = trainSize; i < insts.numInstances(); i++)
                {
                    weka.core.Instance currentInst    = insts.instance(i);
                    double             predictedClass = RandomForestcl.classifyInstance(currentInst);
                    if (predictedClass == insts.instance(i).classValue())
                    {
                        numCorrect++;
                    }
                }
                return((double)numCorrect / (double)testSize * 100.0);
            }
            catch (java.lang.Exception ex)
            {
                ex.printStackTrace();
                return(0);
            }
        }
            public int InitializeClassifier(string[] atributes, string[] gestures, string classAttribute, string modelLocation)
            {
                java.io.ObjectInputStream ois = new java.io.ObjectInputStream(new java.io.FileInputStream(modelLocation));


                m_cl = (weka.classifiers.Classifier)ois.readObject();

                //Declare the feature vector
                weka.core.FastVector fvWekaFeatureVector = new weka.core.FastVector(atributes.Length + 1);
                for (int i = 0; i < atributes.Length; i++)
                {
                    weka.core.Attribute aux = new weka.core.Attribute(atributes[i]);
                    fvWekaFeatureVector.addElement(aux);
                }


                //Declare the class weka.core.Attribute along with its values
                weka.core.FastVector fvClassValues = new weka.core.FastVector(gestures.Length);
                for (int i = 0; i < gestures.Length; i++)
                {
                    fvClassValues.addElement(gestures[i]);
                }
                //fvClassValues.addElement("yes");
                //fvClassValues.addElement("no");

                weka.core.Attribute ClassAttribute = new weka.core.Attribute(classAttribute, fvClassValues);

                fvWekaFeatureVector.addElement(ClassAttribute);

                dataSet = new weka.core.Instances("TestRel", fvWekaFeatureVector, 10);
                dataSet.setClassIndex(atributes.Length);

                testInstance = new weka.core.Instance(atributes.Length + 1);

                return(1);
            }
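A companion sketch for the gesture classifier initialized above: it reuses the dataSet header, the pre-allocated testInstance and the deserialized m_cl from InitializeClassifier; the method name and the feature array are assumptions.

            public string ClassifyGesture(double[] features)
            {
                // Attach the pre-allocated instance to the dataset header and fill it.
                testInstance.setDataset(dataSet);
                for (int i = 0; i < features.Length; i++)
                {
                    testInstance.setValue(i, features[i]);
                }

                // Score the gesture and translate the class index into its label.
                double predicted = m_cl.classifyInstance(testInstance);
                return dataSet.classAttribute().value((int)predicted);
            }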
        private void Browse_Click(object sender, EventArgs e)
        {
            OpenFileDialog ofd = new OpenFileDialog();

            ofd.ShowDialog();
            file        = ofd.SafeFileName;
            label2.Text = "Wait process in progress";

            weka.core.Instances insts = new weka.core.Instances(new java.io.FileReader(file));
            double max_value          = J48classifyTest(insts);

            model = J48cl;
            string name = "J48cl";


            double NBvalue = NaiveBayesTest(insts);

            if (NBvalue > max_value)
            {
                max_value = NBvalue;
                model     = NaiveBayescl;
                name      = "NaiveBayes";
            }
            double RFvalue = RandomForestTest(insts);

            if (RFvalue > max_value)
            {
                max_value = RFvalue;
                model     = RandomForestcl;
                name      = "RandomForest";
            }
            double RTvalue = RandomTreeTest(insts);

            if (RTvalue > max_value)
            {
                max_value = RTvalue;
                model     = RandomTreecl;
                name      = "RandomTree";
            }
            double _5IBKvalue = _5IBkTest(insts);

            if (_5IBKvalue > max_value)
            {
                max_value = _5IBKvalue;
                model     = _5IBKcl;
                name      = " _5IBK";
            }
            double _7IBKvalue = _7IBkTest(insts);

            if (_7IBKvalue > max_value)
            {
                max_value = _7IBKvalue;
                model     = _7IBKcl;
                name      = " _7IBk";
            }
            double _9IBKvalue = _9IBkTest(insts);

            if (_9IBKvalue > max_value)
            {
                max_value = _9IBKvalue;
                model     = _9IBKcl;
                name      = " _9IBk";
            }
            double LogRegressionvalue = LogRegressionTest(insts);

            if (LogRegressionvalue > max_value)
            {
                max_value = LogRegressionvalue;
                model     = LogRegressioncl;
                name      = "LogRegression";
            }
            double SVM = SupportVectorMachineTest(insts);

            if (SVM > max_value)
            {
                max_value = SVM;
                model     = SupportVectorMachine;
                name      = "SupportVectorMachine";
            }
            double ArtNN = ArtNeuralNetworkTest(insts);

            if (ArtNN > max_value)
            {
                max_value = ArtNN;
                model     = ArtNeuralNetwork;
                name      = "ArtNeuralNetwork";
            }

            label2.Text = name + " is the most successful algorithm for this data set " + "(%" + Math.Round(max_value, 2) + ")";

            for (int i = 0; i < insts.numAttributes() - 1; i++)
            {
                if (insts.attribute(i).isNominal())
                {
                    Label l = new Label();
                    flowLayoutPanel2.Controls.Add(l);

                    l.Top  = i * 30 + 175;
                    l.Left = 100;
                    l.Text = insts.attribute(i).name().ToString() + ": ";


                    ComboBox mybox = new ComboBox();
                    for (int j = 0; j < insts.attribute(i).numValues(); j++)
                    {
                        mybox.Items.Add(insts.attribute(i).value(j));
                    }
                    // Creating and setting the properties of comboBox

                    mybox.DropDownStyle = ComboBoxStyle.DropDownList;
                    mybox.Size          = new Size(100, 30);
                    mybox.Top           = i * 30 + 175;
                    l.Left    = 200;
                    mybox.Tag = i;
                    flowLayoutPanel2.Controls.Add(mybox);
                    list.Add(mybox);
                }
                else
                {
                    Label l = new Label();
                    flowLayoutPanel2.Controls.Add(l);

                    l.Text = insts.attribute(i).name().ToString() + ": ";
                    TextBox txt = new TextBox();
                    txt.Tag = i;
                    list.Add(txt);
                    flowLayoutPanel2.Controls.Add(txt);
                }
            }

            Button button = new Button();

            button.Name      = "Discover";
            button.Text      = "Find";
            button.Location  = new Point(468, 72);
            button.Size      = new Size(60, 30);
            button.BackColor = Color.Red;
            button.Font      = new Font(button.Font.Name, button.Font.Size, FontStyle.Bold);
            button.Click    += new EventHandler(button1_Click);

            Controls.Add(button);
        }
Example 24
        private void Train(string str)
        {
            if (string.IsNullOrEmpty(str))
            {
                return;
            }
            m_cls = CreateClassifier(str);
            if (m_cls == null)
            {
                MessageBox.Show("Can't Create Classifier!");
                return;
            }

            var trainInstances = CreateCurrentInstances();

            m_cls.buildClassifier(trainInstances);

            // TEST
            var           data = CreateEmptyInstances();
            StringBuilder sb   = new StringBuilder();

            if (m_cls is MLEA.IBatchClassifier)
            {
                MLEA.IBatchClassifier batchClassifier = m_cls as MLEA.IBatchClassifier;
                for (int i = 0; i < XLEN; i++)
                {
                    for (int j = 0; j < YLEN; j++)
                    {
                        var vals = new double[data.numAttributes()];
                        vals[0] = (double)i / XLEN;
                        vals[1] = (double)j / YLEN;

                        var instance = new weka.core.DenseInstance(1.0, vals);
                        data.add(instance);
                        instance.setDataset(data);
                    }
                }

                double[] ds = batchClassifier.classifyInstances(data);
                for (int i = 0; i < XLEN; i++)
                {
                    for (int j = 0; j < YLEN; j++)
                    {
                        double d = ds[i * XLEN + j];

                        if (m_enableLog)
                        {
                            string s = string.Format("{0}, {1}: {2}", data.instance(i * XLEN + j).value(0).ToString("N2"), data.instance(i * XLEN + j).value(1).ToString("N2"), d.ToString("N0"));
                            sb.AppendLine(s);
                        }

                        for (int ii = 0; ii < WXLEN / XLEN; ++ii)
                        {
                            for (int jj = 0; jj < WYLEN / YLEN; ++jj)
                            {
                                m_pictureBoxBitmap.SetPixel(i * WXLEN / XLEN + ii, j * WYLEN / YLEN + jj, GetValueColor((int)d, false));
                            }
                        }
                    }
                }
            }
            else
            {
                for (int i = 0; i < XLEN; i++)
                {
                    for (int j = 0; j < YLEN; j++)
                    {
                        var vals = new double[data.numAttributes()];
                        vals[0] = (double)i / XLEN;
                        vals[1] = (double)j / YLEN;

                        var instance = new weka.core.DenseInstance(1.0, vals);
                        data.add(instance);
                        instance.setDataset(data);

                        double d = m_cls.classifyInstance(instance);

                        if (m_enableLog)
                        {
                            string s = string.Format("{0}, {1}: {2}", vals[0].ToString("N2"), vals[1].ToString("N2"), d.ToString("N0"));
                            sb.AppendLine(s);
                        }

                        for (int ii = 0; ii < WXLEN / XLEN; ++ii)
                        {
                            for (int jj = 0; jj < WYLEN / YLEN; ++jj)
                            {
                                m_pictureBoxBitmap.SetPixel(i * WXLEN / XLEN + ii, j * WYLEN / YLEN + jj, GetValueColor((int)d, false));
                            }
                        }
                    }
                }
            }
            draw_all_points();

            this.Invoke(new Action(() =>
            {
                if (m_enableLog)
                {
                    txtLog.AppendText(sb.ToString());
                }
            }));

            if (m_enableEvaluation)
            {
                Test(trainInstances);
            }
        }
Example 25
    //**************************************************************************************

    /// <summary>
    /// Build classifier model and save it to a file.
    /// </summary>
    public override void Build(CandlestickCollection iCandlestick)
    {
        List <int> trainingPoints = null;

        // Calculate average profit and std dev
        if (J48Info.ProfitAverage is null || J48Info.ProfitStdDev is null)
        {
            trainingPoints = LoadTrainingPoints(iCandlestick, ID, ProfitTime);
            float[] profits = FullToTraining(new List <float>(CalculateFutureProfits(iCandlestick[kTrainingPeriod], ProfitTime)), trainingPoints).ToArray();
            J48Info.ProfitStdDev  = Statistics.StandardDeviation(profits);
            J48Info.ProfitAverage = J48Info.ParentID is null ? 0.0f : Statistics.ArithmeticMean(profits);
            WekaJ48Info.UpdateDB(J48Info);
        }

        // Build model
        if (!File.Exists(ModelFilename))
        {
            OutputMessage("Building model");

            if (trainingPoints is null)
            {
                trainingPoints = LoadTrainingPoints(iCandlestick, ID, ProfitTime);
            }

            Model = new weka.classifiers.trees.J48();
            Model.buildClassifier(CreateInstances(iCandlestick, trainingPoints, Attributes, Parameters, Period, ProfitTime));
            weka.core.SerializationHelper.write(ModelFilename, Model);
        }

        // Perform crossfold test
        if (J48Info.Precision is null)
        {
            if (Model is null)
            {
                LoadModel();
            }

            OutputMessage("Performing crossfold");

            if (trainingPoints is null)
            {
                trainingPoints = LoadTrainingPoints(iCandlestick, ID, ProfitTime);
            }

            var instances  = CreateInstances(iCandlestick, trainingPoints, Attributes, Parameters, Period, ProfitTime);
            var evaluation = new weka.classifiers.Evaluation(instances);
            evaluation.crossValidateModel(Model, instances, 10, new java.util.Random(0));

            J48Info.Precision = (float)evaluation.pctCorrect();

            WekaJ48Info.UpdateDB(J48Info);
        }

        // Perform singular test
        if (J48Info.IsSingular == null)
        {
            if (Model is null)
            {
                LoadModel();
            }

            OutputMessage("Performing singular test");

            var results = new SortedList <Prediction, List <int> >();
            foreach (Prediction p in (Prediction[])Enum.GetValues(typeof(Prediction)))
            {
                results.Add(p, new List <int>());
            }

            if (trainingPoints is null)
            {
                trainingPoints = LoadTrainingPoints(iCandlestick, ID, ProfitTime);
            }

            var parameters = CalculateParameters(Parameters, iCandlestick, trainingPoints, Period);

            for (int k = 0; k < parameters.Count; k++)
            {
                var pred = Predict(parameters[k]);
                results[pred].Add(trainingPoints[k]);
            }

            J48Info.IsSingular = results.Count(x => x.Value.Count > 0) <= 1;

            WekaJ48Info.UpdateDB(J48Info);
        }

        // Calculating prediction profits
        if (J48Info.PredictionProfits.Count(x => x != null) == 0)
        {
            if (Model is null)
            {
                LoadModel();
            }

            OutputMessage("Calculating prediction profits");

            if (trainingPoints is null)
            {
                trainingPoints = LoadTrainingPoints(iCandlestick, ID, ProfitTime);
            }

            var predictionPoints = GetHistoricalPredictionPoints(iCandlestick, trainingPoints);

            foreach (Prediction p in (Prediction[])Enum.GetValues(typeof(Prediction)))
            {
                float[] profits = FullToTraining(new List <float>(CalculateFutureProfits(iCandlestick[kTrainingPeriod], ProfitTime)), predictionPoints[p]).ToArray();

                if (profits.Length < 10)
                {
                    J48Info.PredictionProfits[(int)p] = DecisionToFutureProfit(p, (float)J48Info.ProfitStdDev, (float)J48Info.ProfitAverage);
                }
                else
                {
                    J48Info.PredictionProfits[(int)p] = Statistics.ArithmeticMean(profits);
                }
            }

            WekaJ48Info.UpdateDB(J48Info);
        }

        // Create children
        if (!J48Info.ReproductionComplete.GetValueOrDefault(false))
        {
            lock (this)
            {
                if (J48Info.Precision > 50.0f && !J48Info.IsSingular.GetValueOrDefault(false))
                {
                    OutputMessage("Creating children");

                    if (trainingPoints is null)
                    {
                        trainingPoints = LoadTrainingPoints(iCandlestick, ID, ProfitTime);
                    }

                    var predictionPoints = GetHistoricalPredictionPoints(iCandlestick, trainingPoints);

                    foreach (Prediction p in (Prediction[])Enum.GetValues(typeof(Prediction)))
                    {
                        if (predictionPoints[p] != null && predictionPoints[p].Count >= 1000 && J48Info.ChildrenID[(int)p] == null)
                        {
                            var child = CreateNew(ParametersID, Parameters, Period, ProfitTime, predictionPoints[p]);

                            // Set parent
                            child.J48Info.ParentID = ID;
                            WekaJ48Info.UpdateDB(child.J48Info);

                            // Update parent info
                            J48Info.ChildrenID[(int)p] = (int)child.ID;
                            WekaJ48Info.UpdateDB(J48Info);
                            childs[(int)p] = child;
                        }
                    }
                }

                J48Info.ReproductionComplete = true;
                WekaJ48Info.UpdateDB(J48Info);
            }
        }
    }
Example 26
        //File selection and percentage-calculation section
        private void btnBrowse_Click(object sender, EventArgs e)
        {
            clears();
            OpenFileDialog file = new OpenFileDialog();

            file.Filter      = "Files (ARFF)|*.ARFF";
            file.Multiselect = false;
            file.Title       = "Please select a dataset file!";
            if (file.ShowDialog() == DialogResult.OK)
            {
                txtPath.Text = file.FileName;
                fileName     = file.SafeFileName;

                //runs the processing after a file has been selected.
                try
                {
                    if (txtPath.Text.Length < 1)
                    {
                        MessageBox.Show("Please select file!", "Error Message!");
                        txtPath.Text = "";
                    }
                    else
                    {
                        this.Text = "Processing...";
                        insts     = new weka.core.Instances(new java.io.FileReader(txtPath.Text));
                        //naive bayes
                        double max_value = NaiveBayesTest(insts);
                        model = NaiveBayescl;
                        name  = "Naïve Bayes";

                        //logistic regression
                        double LogRegressionvalue = LogRegressionTest(insts);
                        if (LogRegressionvalue > max_value)
                        {
                            max_value = LogRegressionvalue;
                            model     = LogRegressioncl;
                            name      = "Logistic Regression";
                        }
                        //knn
                        double KnnValue = Knn(insts);
                        if (KnnValue > max_value)
                        {
                            max_value = KnnValue;
                            model     = Knncl;
                            name      = "K-Nearest Neighbour";
                        }
                        //J48
                        double J48Value = J48classifyTest(insts);
                        if (J48Value > max_value)
                        {
                            max_value = J48Value;
                            model     = J48cl;
                            name      = "Decision Tree(J48)";
                        }
                        //Random forest
                        double RFvalue = RandomForestTest(insts);
                        if (RFvalue > max_value)
                        {
                            max_value = RFvalue;
                            model     = RandomForestcl;
                            name      = "Decision Tree(Random Forest)";
                        }
                        //Random Tree
                        double RTvalue = RandomTreeTest(insts);
                        if (RTvalue > max_value)
                        {
                            max_value = RTvalue;
                            model     = RandomTreecl;
                            name      = "Decision Tree(Random Tree)";
                        }
                        //Artificial nn
                        double AnnValue = ArtificialNN(insts);
                        if (AnnValue > max_value)
                        {
                            max_value = AnnValue;
                            model     = Anncl;
                            name      = "Artificial Neural Network";
                        }
                        //Svm
                        double SvmValue = SVM(insts);
                        if (SvmValue > max_value)
                        {
                            max_value = SvmValue;
                            model     = Svmcl;
                            name      = "Support Vector Machine";
                        }

                        //Model saving section
                        weka.core.SerializationHelper.write("models/mdl.model", model);

                        lblResult.Text = name + " is the most successful algorithm for this data set (%" + string.Format("{0:0.00}", max_value) + ")";
                        this.Text      = "DEUCENG - ML Classification Tool";

                        //attribute selection controls
                        numAtt = insts.numAttributes() - 1;

                        int x = 30, y = 130, t = 35, l = 110;
                        int txt = 0, cmb = 0, r1 = 0, r2 = 0;
                        labels = new Label[insts.numAttributes()];
                        for (int i = 0; i < numAtt; i++)
                        {
                            if (insts.attribute(i).isNumeric())
                            {
                                txt++;
                            }
                            else if (insts.attribute(i).isNominal())
                            {
                                cmb++;
                            }
                        }

                        nominal      = new ComboBox[cmb];
                        numeric      = new TextBox[txt];
                        typeAtt      = new bool[numAtt];
                        this.Height += (numAtt + 1) * t;

                        for (int i = 0; i < numAtt; i++)
                        {
                            if (insts.attribute(i).isNominal())
                            {
                                string[] s1 = insts.attribute(i).toString().Split('{');
                                string[] s2 = s1[1].Split('}');
                                string[] s3 = s2[0].Split(',');

                                nominal[r1] = new ComboBox();
                                labels[i]   = new Label();
                                for (int j = 0; j < s3.Length; j++)
                                {
                                    nominal[r1].Items.Add(s3[j].Replace('\'', ' ').Trim());
                                }
                                labels[i].Text = insts.attribute(i).name();
                                labels[i].Left = x;
                                labels[i].Top  = y;

                                nominal[r1].Left          = x + l;
                                nominal[r1].Top           = y;
                                nominal[r1].DropDownStyle = ComboBoxStyle.DropDownList;
                                y += t;
                                Controls.Add(nominal[r1]);
                                Controls.Add(labels[i]);
                                r1++;
                                typeAtt[i] = true;
                            }
                            else if (insts.attribute(i).isNumeric())
                            {
                                numeric[r2]      = new TextBox();
                                labels[i]        = new Label();
                                labels[i].Text   = insts.attribute(i).name();
                                labels[i].Left   = x;
                                labels[i].Top    = y;
                                numeric[r2].Left = x + l;
                                numeric[r2].Top  = y;
                                y += t;
                                Controls.Add(numeric[r2]);
                                Controls.Add(labels[i]);
                                r2++;
                                typeAtt[i] = false;
                            }

                            btnDiscover.Enabled = true;
                        }
                    }
                }
                catch (Exception e2)
                {
                    MessageBox.Show(e2.Message, "Error Message!");
                }
            }
        }
Example 27
 public override void UnloadModel() => Model = null;
Example 28
 private void btnRun_Click(object sender, EventArgs e)
 {
     m_cls = null;
     backgroundWorker1.RunWorkerAsync(txtWeka.Text);
     progressBar1.MarqueeAnimationSpeed = 100;
     btnRun.Enabled = false;
 }
Example 29
        private void button1_Click(object sender, EventArgs e)
        {
            OpenFileDialog file = new OpenFileDialog();

            if (file.ShowDialog() == DialogResult.OK)
            {
                string filename = file.FileName;
                string filee    = Path.GetFileName(filename);
                bool   attributeType;
                string attributeName      = " ";
                int    numAttributeValue  = 0;
                string attributeValueName = " ";

                textBox1.Text = filee + " chosen successfully!";

                ///////Decision Tree
                weka.core.Instances insts = new weka.core.Instances(new java.io.FileReader(filename));


                insts.setClassIndex(insts.numAttributes() - 1);

                //find nominal or numeric attributes and create dropbox or textbox
                int numofAttributes = insts.numAttributes() - 1;
                for (int i = 0; i < numofAttributes; i++)
                {
                    attributeType = insts.attribute(i).isNumeric();
                    attributeName = insts.attribute(i).name();
                    dataGridView1.Rows.Add(attributeName);
                    if (attributeType == true)
                    {
                    }
                    else
                    {
                        numAttributeValue = insts.attribute(i).numValues();
                        string[] name = new string[numAttributeValue];
                        for (int j = 0; j < numAttributeValue; j++)
                        {
                            attributeValueName = insts.attribute(i).value(j);
                            name[j]           += attributeValueName;
                        }
                        DataGridViewComboBoxCell combo = new DataGridViewComboBoxCell();
                        combo.DataSource = name.ToList();
                        dataGridView1.Rows[i].Cells[1] = combo;
                    }
                }

                cl = new weka.classifiers.trees.J48();

                textBox2.Text = "Performing " + percentSplit + "% split evaluation.";

                //filling missing values
                weka.filters.Filter missingval = new weka.filters.unsupervised.attribute.ReplaceMissingValues();
                missingval.setInputFormat(insts);
                insts = weka.filters.Filter.useFilter(insts, missingval);

                weka.filters.Filter myNormalized = new weka.filters.unsupervised.instance.Normalize();
                myNormalized.setInputFormat(insts);
                insts = weka.filters.Filter.useFilter(insts, myNormalized);


                //randomize the order of the instances in the dataset.
                weka.filters.Filter myRandom = new weka.filters.unsupervised.instance.Randomize();
                myRandom.setInputFormat(insts);
                insts = weka.filters.Filter.useFilter(insts, myRandom);

                int trainSize             = insts.numInstances() * percentSplit / 100;
                int testSize              = insts.numInstances() - trainSize;
                weka.core.Instances train = new weka.core.Instances(insts, 0, trainSize);

                cl.buildClassifier(train);

                string str = cl.toString();

                int numCorrect = 0;
                for (int i = trainSize; i < insts.numInstances(); i++)
                {
                    weka.core.Instance currentInst    = insts.instance(i);
                    double             predictedClass = cl.classifyInstance(currentInst);
                    if (predictedClass == insts.instance(i).classValue())
                    {
                        numCorrect++;
                    }
                }
                textBox3.Text = numCorrect + " out of " + testSize + " correct (" +
                                (double)((double)numCorrect / (double)testSize * 100.0) + "%)";



                //////////Naive Bayes

                //read the file
                weka.core.Instances insts2 = new weka.core.Instances(new java.io.FileReader(filename));
                insts2.setClassIndex(insts2.numAttributes() - 1);

                //naive bayes
                cl2 = new weka.classifiers.bayes.NaiveBayes();


                //filling missing values
                weka.filters.Filter missingval2 = new weka.filters.unsupervised.attribute.ReplaceMissingValues();
                missingval2.setInputFormat(insts2);
                insts2 = weka.filters.Filter.useFilter(insts2, missingval2);

                //for naive bayes
                weka.filters.Filter discrete2 = new weka.filters.unsupervised.attribute.Discretize();
                discrete2.setInputFormat(insts2);
                insts2 = weka.filters.Filter.useFilter(insts2, discrete2);

                //randomize the order of the instances in the dataset (shared step).
                weka.filters.Filter myRandom2 = new weka.filters.unsupervised.instance.Randomize();
                myRandom2.setInputFormat(insts2);
                insts2 = weka.filters.Filter.useFilter(insts2, myRandom2);

                //shared split step
                int trainSize2             = insts2.numInstances() * percentSplit / 100;
                int testSize2              = insts2.numInstances() - trainSize2;
                weka.core.Instances train2 = new weka.core.Instances(insts2, 0, trainSize2);

                cl2.buildClassifier(train2);

                string str2 = cl2.toString();

                int numCorrect2 = 0;
                for (int i = trainSize2; i < insts2.numInstances(); i++)
                {
                    weka.core.Instance currentInst2    = insts2.instance(i);
                    double             predictedClass2 = cl2.classifyInstance(currentInst2);
                    if (predictedClass2 == insts2.instance(i).classValue())
                    {
                        numCorrect2++;
                    }
                }
                textBox4.Text = numCorrect2 + " out of " + testSize2 + " correct (" +
                                (double)((double)numCorrect2 / (double)testSize2 * 100.0) + "%)";


                /////////K-Nearest Neigbour

                //read the file
                weka.core.Instances insts3 = new weka.core.Instances(new java.io.FileReader(filename));
                insts3.setClassIndex(insts3.numAttributes() - 1);

                cl3 = new weka.classifiers.lazy.IBk();


                //filling missing values
                weka.filters.Filter missingval3 = new weka.filters.unsupervised.attribute.ReplaceMissingValues();
                missingval3.setInputFormat(insts3);
                insts3 = weka.filters.Filter.useFilter(insts3, missingval3);

                //Convert to dummy attribute knn,svm,neural network
                weka.filters.Filter dummy3 = new weka.filters.unsupervised.attribute.NominalToBinary();
                dummy3.setInputFormat(insts3);
                insts3 = weka.filters.Filter.useFilter(insts3, dummy3);

                //normalize numeric attribute
                weka.filters.Filter myNormalized3 = new weka.filters.unsupervised.instance.Normalize();
                myNormalized3.setInputFormat(insts3);
                insts3 = weka.filters.Filter.useFilter(insts3, myNormalized3);

                //randomize the order of the instances in the dataset.
                weka.filters.Filter myRandom3 = new weka.filters.unsupervised.instance.Randomize();
                myRandom3.setInputFormat(insts3);
                insts3 = weka.filters.Filter.useFilter(insts3, myRandom3);

                int trainSize3             = insts3.numInstances() * percentSplit / 100;
                int testSize3              = insts3.numInstances() - trainSize3;
                weka.core.Instances train3 = new weka.core.Instances(insts3, 0, trainSize3);

                cl3.buildClassifier(train3);

                string str3 = cl3.toString();

                int numCorrect3 = 0;
                for (int i = trainSize3; i < insts3.numInstances(); i++)
                {
                    weka.core.Instance currentInst3    = insts3.instance(i);
                    double             predictedClass3 = cl3.classifyInstance(currentInst3);
                    if (predictedClass3 == insts3.instance(i).classValue())
                    {
                        numCorrect3++;
                    }
                }
                textBox5.Text = numCorrect3 + " out of " + testSize3 + " correct (" +
                                (double)((double)numCorrect3 / (double)testSize3 * 100.0) + "%)";

                //////////Artificial neural network
                //read the input data file
                weka.core.Instances insts4 = new weka.core.Instances(new java.io.FileReader(filename));
                insts4.setClassIndex(insts4.numAttributes() - 1);

                cl4 = new weka.classifiers.functions.MultilayerPerceptron();


                //filling missing values
                weka.filters.Filter missingval4 = new weka.filters.unsupervised.attribute.ReplaceMissingValues();
                missingval4.setInputFormat(insts4);
                insts4 = weka.filters.Filter.useFilter(insts4, missingval4);

                //Convert to dummy attribute
                weka.filters.Filter dummy4 = new weka.filters.unsupervised.attribute.NominalToBinary();
                dummy4.setInputFormat(insts4);
                insts4 = weka.filters.Filter.useFilter(insts4, dummy4);

                //normalize numeric attribute
                weka.filters.Filter myNormalized4 = new weka.filters.unsupervised.instance.Normalize();
                myNormalized4.setInputFormat(insts4);
                insts4 = weka.filters.Filter.useFilter(insts4, myNormalized4);

                //randomize the order of the instances in the dataset.
                weka.filters.Filter myRandom4 = new weka.filters.unsupervised.instance.Randomize();
                myRandom4.setInputFormat(insts4);
                insts4 = weka.filters.Filter.useFilter(insts4, myRandom4);

                int trainSize4             = insts4.numInstances() * percentSplit / 100;
                int testSize4              = insts4.numInstances() - trainSize4;
                weka.core.Instances train4 = new weka.core.Instances(insts4, 0, trainSize4);

                cl4.buildClassifier(train4);

                string str4 = cl4.toString();

                int numCorrect4 = 0;
                for (int i = trainSize4; i < insts4.numInstances(); i++)
                {
                    weka.core.Instance currentInst4    = insts4.instance(i);
                    double             predictedClass4 = cl4.classifyInstance(currentInst4);
                    if (predictedClass4 == insts4.instance(i).classValue())
                    {
                        numCorrect4++;
                    }
                }

                textBox6.Text = numCorrect4 + " out of " + testSize4 + " correct (" +
                                (double)((double)numCorrect4 / (double)testSize4 * 100.0) + "%)";



                ///////Support Vector Machine
                //read the input data file
                weka.core.Instances insts5 = new weka.core.Instances(new java.io.FileReader(filename));
                insts5.setClassIndex(insts5.numAttributes() - 1);

                cl5 = new weka.classifiers.functions.SMO();


                //filling missing values
                weka.filters.Filter missingval5 = new weka.filters.unsupervised.attribute.ReplaceMissingValues();
                missingval5.setInputFormat(insts5);
                insts5 = weka.filters.Filter.useFilter(insts5, missingval5);

                //Convert to dummy attribute
                weka.filters.Filter dummy5 = new weka.filters.unsupervised.attribute.NominalToBinary();
                dummy5.setInputFormat(insts5);
                insts5 = weka.filters.Filter.useFilter(insts5, dummy5);

                //normalize numeric attribute
                weka.filters.Filter myNormalized5 = new weka.filters.unsupervised.instance.Normalize();
                myNormalized5.setInputFormat(insts5);
                insts5 = weka.filters.Filter.useFilter(insts5, myNormalized5);

                //randomize the order of the instances in the dataset.
                weka.filters.Filter myRandom5 = new weka.filters.unsupervised.instance.Randomize();
                myRandom5.setInputFormat(insts5);
                insts5 = weka.filters.Filter.useFilter(insts5, myRandom5);

                int trainSize5             = insts5.numInstances() * percentSplit / 100;
                int testSize5              = insts5.numInstances() - trainSize5;
                weka.core.Instances train5 = new weka.core.Instances(insts5, 0, trainSize5);

                cl5.buildClassifier(train5);

                string str5 = cl5.toString();

                int numCorrect5 = 0;
                for (int i = trainSize5; i < insts5.numInstances(); i++)
                {
                    weka.core.Instance currentInst5    = insts5.instance(i);
                    double             predictedClass5 = cl5.classifyInstance(currentInst5);
                    if (predictedClass5 == insts5.instance(i).classValue())
                    {
                        numCorrect5++;
                    }
                }

                textBox7.Text = numCorrect5 + " out of " + testSize5 + " correct (" +
                                (double)((double)numCorrect5 / (double)testSize5 * 100.0) + "%)";



                //extract the accuracy percentage from each result string, e.g. "50 out of 51 correct (98.03%)"
                string result1 = textBox3.Text;
                string output1 = result1.Split('(', ')')[1];
                output1 = output1.Remove(output1.Length - 1);
                double r1 = Convert.ToDouble(output1);

                string result2 = textBox4.Text;
                string output2 = result2.Split('(', ')')[1];
                output2 = output2.Remove(output2.Length - 1);
                double r2 = Convert.ToDouble(output2);

                string result3 = textBox5.Text;
                string output3 = result3.Split('(', ')')[1];
                output3 = output3.Remove(output3.Length - 1);
                double r3 = Convert.ToDouble(output3);

                string result4 = textBox6.Text;
                string output4 = result4.Split('(', ')')[1];
                output4 = output4.Remove(output4.Length - 1);
                double r4 = Convert.ToDouble(output4);

                string result5 = textBox7.Text;
                string output5 = result5.Split('(', ')')[1];
                output5 = output5.Remove(output5.Length - 1);
                double r5 = Convert.ToDouble(output5);


                double[] max_array = new double[] { r1, r2, r3, r4, r5 };

                double max = max_array.Max();
                if (r1 == max)
                {
                    textBox8.Text = "Best Algoritm is Decision Tree Algorithm ";
                }
                else if (r2 == max)
                {
                    textBox8.Text = "Best Algoritm is Naive Bayes Algorithm ";
                }
                else if (r3 == max)
                {
                    textBox8.Text = "Best Algoritm is K-Nearest Neighbour Algorithm ";
                }
                else if (r4 == max)
                {
                    textBox8.Text = "Best Algoritm is Artificial Neural Network Algorithm ";
                }
                else if (r5 == max)
                {
                    textBox8.Text = "Best Algoritm is Support Vector Machine Algorithm ";
                }
            }
        }
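Re-parsing the accuracy back out of the textbox strings works, but the comparison can be done more directly by computing each accuracy as a double while evaluating. The following is a minimal sketch in the same style, assuming a hypothetical helper named EvaluateSplit and the same percentSplit variable; it is not part of the original code:

        private static double EvaluateSplit(weka.classifiers.Classifier cl, weka.core.Instances insts, int percentSplit)
        {
            //train on the first percentSplit% of the (already randomized) instances
            int trainSize = insts.numInstances() * percentSplit / 100;
            weka.core.Instances train = new weka.core.Instances(insts, 0, trainSize);
            cl.buildClassifier(train);

            //test on the remaining instances and return the accuracy in percent
            int numCorrect = 0;
            for (int i = trainSize; i < insts.numInstances(); i++)
            {
                if (cl.classifyInstance(insts.instance(i)) == insts.instance(i).classValue())
                {
                    numCorrect++;
                }
            }
            return (double)numCorrect / (insts.numInstances() - trainSize) * 100.0;
        }

With such a helper, r1 through r5 could be filled directly and the string parsing above would no longer be necessary.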
Esempio n. 30
0
        private void btnDiscover_Click(object sender, EventArgs e)
        {
            string type  = model.GetType().ToString();
            bool   flag  = false;
            bool   flag2 = false;

            //input validation
            if (nominal != null)
            {
                for (int i = 0; i < nominal.Length; i++)
                {
                    if (nominal[i].SelectedIndex == -1)
                    {
                        flag = true;
                        break;
                    }
                }
            }
            if (numeric != null)
            {
                for (int i = 0; i < numeric.Length; i++)
                {
                    if (String.IsNullOrEmpty(numeric[i].Text))
                    {
                        flag2 = true;
                        break;
                    }
                }
            }
            if (numeric != null && numAtt == numeric.Length && flag2)
            {
                MessageBox.Show("Please select a value!", "Error Message!");
            }
            else if (nominal != null && numAtt == nominal.Length && flag)
            {
                MessageBox.Show("Please select a value!", "Error Message!");
            }
            else if (nominal != null && numeric != null && (nominal.Length + numeric.Length) == numAtt && (flag || flag2))
            {
                MessageBox.Show("Please select a value!", "Error Message!");
            }
            else
            {
                weka.core.Instance newIns = new weka.core.Instance(numAtt + 1);
                newIns.setDataset(insts);

                int i1 = 0, i2 = 0;
                for (int i = 0; i < numAtt; i++)
                {
                    //nominal
                    if (typeAtt[i])
                    {
                        newIns.setValue(i, nominal[i1].SelectedItem.ToString());
                        i1++;
                    }
                    //numeric
                    else
                    {
                        newIns.setValue(i, double.Parse(numeric[i2].Text));
                        i2++;
                    }
                }

                weka.core.Instances insts2 = new weka.core.Instances(insts);
                insts2.add(newIns);

                if (type == "weka.classifiers.bayes.NaiveBayes")
                {
                    weka.filters.Filter myDiscretize = new weka.filters.unsupervised.attribute.Discretize();
                    myDiscretize.setInputFormat(insts2);
                    insts2 = weka.filters.Filter.useFilter(insts2, myDiscretize);
                }

                else if (type == "weka.classifiers.functions.Logistic")
                {
                    weka.filters.Filter myDummy = new weka.filters.unsupervised.attribute.NominalToBinary();
                    myDummy.setInputFormat(insts2);
                    insts2 = weka.filters.Filter.useFilter(insts2, myDummy);

                    weka.filters.Filter myNormalize = new weka.filters.unsupervised.instance.Normalize();
                    myNormalize.setInputFormat(insts2);
                    insts2 = weka.filters.Filter.useFilter(insts2, myNormalize);
                }

                else if (type == "new weka.classifiers.lazy.IBk")
                {
                    weka.filters.Filter myDummy = new weka.filters.unsupervised.attribute.NominalToBinary();
                    myDummy.setInputFormat(insts2);
                    insts2 = weka.filters.Filter.useFilter(insts2, myDummy);

                    weka.filters.Filter myNormalize = new weka.filters.unsupervised.instance.Normalize();
                    myNormalize.setInputFormat(insts2);
                    insts2 = weka.filters.Filter.useFilter(insts2, myNormalize);
                }
                else if (type == "weka.classifiers.trees.J48")
                {
                }
                else if (type == "weka.classifiers.trees.RandomForest")
                {
                }
                else if (type == "weka.classifiers.trees.RandomTree")
                {
                }
                else if (type == "weka.classifiers.functions.MultilayerPerceptron")
                {
                    weka.filters.Filter myDummy = new weka.filters.unsupervised.attribute.NominalToBinary();
                    myDummy.setInputFormat(insts2);
                    insts2 = weka.filters.Filter.useFilter(insts2, myDummy);

                    weka.filters.Filter myNormalize = new weka.filters.unsupervised.instance.Normalize();
                    myNormalize.setInputFormat(insts2);
                    insts2 = weka.filters.Filter.useFilter(insts2, myNormalize);
                }
                else if (type == "weka.classifiers.functions.SMO")
                {
                    weka.filters.Filter myDummy = new weka.filters.unsupervised.attribute.NominalToBinary();
                    myDummy.setInputFormat(insts2);
                    insts2 = weka.filters.Filter.useFilter(insts2, myDummy);

                    weka.filters.Filter myNormalize = new weka.filters.unsupervised.instance.Normalize();
                    myNormalize.setInputFormat(insts2);
                    insts2 = weka.filters.Filter.useFilter(insts2, myNormalize);
                }

                double index = model.classifyInstance(insts2.lastInstance());
                //reload the saved model from disk (note: the prediction above uses the in-memory model, not this reloaded copy)
                weka.classifiers.Classifier cls = (weka.classifiers.Classifier)weka.core.SerializationHelper.read("models/mdl.model");
                lblResult2.Text = "Result= " + insts2.attribute(insts2.numAttributes() - 1).value(Convert.ToInt16(index));
            }
        }
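The SerializationHelper.read call above shows only the loading half of model persistence. The matching save step could look like the following minimal sketch, assuming a trained classifier variable named model and the same models/mdl.model path:

                //persist the trained classifier so it can be reloaded later
                weka.core.SerializationHelper.write("models/mdl.model", model);

                //...and reload it before classifying new instances
                weka.classifiers.Classifier cls =
                    (weka.classifiers.Classifier)weka.core.SerializationHelper.read("models/mdl.model");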
Esempio n. 31
0
        private void Train(string str)
        {
            if (string.IsNullOrEmpty(str))
                return;
            m_cls = CreateClassifier(str);
            if (m_cls == null)
            {
                MessageBox.Show("Can't Create Classifier!");
                return;
            }

            var trainInstances = CreateCurrentInstances();
            m_cls.buildClassifier(trainInstances);

            // TEST
            var data = CreateEmptyInstances();
            StringBuilder sb = new StringBuilder();

            if (m_cls is MLEA.IBatchClassifier)
            {
                MLEA.IBatchClassifier batchClassifier = m_cls as MLEA.IBatchClassifier;
                for (int i = 0; i < XLEN; i++)
                {
                    for (int j = 0; j < YLEN; j++)
                    {
                        var vals = new double[data.numAttributes()];
                        vals[0] = (double)i / XLEN;
                        vals[1] = (double)j / YLEN;

                        var instance = new weka.core.DenseInstance(1.0, vals);
                        data.add(instance);
                        instance.setDataset(data);
                    }
                }

                double[] ds = batchClassifier.classifyInstances(data);
                for (int i = 0; i < XLEN; i++)
                {
                    for (int j = 0; j < YLEN; j++)
                    {
                        double d = ds[i * YLEN + j]; //instances were added with j as the inner loop, so each i spans YLEN entries

                        if (m_enableLog)
                        {
                            string s = string.Format("{0}, {1}: {2}", data.instance(i * YLEN + j).value(0).ToString("N2"), data.instance(i * YLEN + j).value(1).ToString("N2"), d.ToString("N0"));
                            sb.AppendLine(s);
                        }

                        for (int ii = 0; ii < WXLEN / XLEN; ++ii)
                            for (int jj = 0; jj < WYLEN / YLEN; ++jj)
                                m_pictureBoxBitmap.SetPixel(i * WXLEN / XLEN + ii, j * WYLEN / YLEN + jj, GetValueColor((int)d, false));
                    }
                }
            }
            else
            {
                for (int i = 0; i < XLEN; i++)
                {
                    for (int j = 0; j < YLEN; j++)
                    {
                        var vals = new double[data.numAttributes()];
                        vals[0] = (double)i / XLEN;
                        vals[1] = (double)j / YLEN;

                        var instance = new weka.core.DenseInstance(1.0, vals);
                        data.add(instance);
                        instance.setDataset(data);

                        double d = m_cls.classifyInstance(instance);

                        if (m_enableLog)
                        {
                            string s = string.Format("{0}, {1}: {2}", vals[0].ToString("N2"), vals[1].ToString("N2"), d.ToString("N0"));
                            sb.AppendLine(s);
                        }

                        for (int ii = 0; ii < WXLEN / XLEN; ++ii)
                            for (int jj = 0; jj < WYLEN / YLEN; ++jj)
                                m_pictureBoxBitmap.SetPixel(i * WXLEN / XLEN + ii, j * WYLEN / YLEN + jj, GetValueColor((int)d, false));
                    }
                }
            }
            draw_all_points();

            this.Invoke(new Action(() =>
                {
                    if (m_enableLog)
                    {
                        txtLog.AppendText(sb.ToString());
                    }
                }));

            if (m_enableEvaluation)
            {
                Test(trainInstances);
            }
        }
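The Test call above evaluates the trained classifier on trainInstances. One way such an evaluation could be done with Weka's built-in Evaluation class is sketched below, assuming the m_cls and trainInstances names used above; this is not necessarily how the original Test method is implemented:

            //sketch: score m_cls with weka.classifiers.Evaluation on a given dataset
            weka.classifiers.Evaluation eval = new weka.classifiers.Evaluation(trainInstances);
            eval.evaluateModel(m_cls, trainInstances);
            Console.WriteLine("Correct: {0:N2}%  Kappa: {1:N2}", eval.pctCorrect(), eval.kappa());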
Esempio n. 32
0
        public static double do_test_single(weka.classifiers.Classifier classifier, weka.core.Instances insts_test)
        {
            weka.core.Instance currentInst = insts_test.lastInstance();
            return classifier.classifyInstance(currentInst);
        }
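A possible usage of do_test_single, assuming a previously trained classifier named trainedClassifier and a test file named test.arff (both names are placeholders, not part of the original code):

        weka.core.Instances instsTest = new weka.core.Instances(new java.io.FileReader("test.arff"));
        instsTest.setClassIndex(instsTest.numAttributes() - 1);
        double predicted = do_test_single(trainedClassifier, instsTest);
        Console.WriteLine("Predicted class index: " + predicted);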