Example No. 1
        public List <string> Classify(string model, string test)
        {
            List <string> ret = new List <string>();

            try
            {
                java.io.ObjectInputStream   ois = new java.io.ObjectInputStream(new java.io.FileInputStream(model));
                weka.classifiers.Classifier cl  = (weka.classifiers.Classifier)ois.readObject();
                ois.close();

                weka.core.Instances insts = new weka.core.Instances(new java.io.FileReader(test));
                insts.setClassIndex(insts.numAttributes() - 1);
                for (int i = 0; i < 1; i++)   // note: only the first test instance is classified
                {
                    weka.core.Instance currentInst    = insts.instance(i);
                    double             predictedClass = cl.classifyInstance(currentInst);
                    double[]           distrs         = cl.distributionForInstance(currentInst);
                    //string actual = insts.classAttribute().value((int)currentInst.classValue());
                    //string predicted = insts.classAttribute().value((int)predictedClass);
                    // System.Console.WriteLine("ID: " + (i + 1) + ", " + predicted);
                    for (int j = 0; j < distrs.Length; j++)
                    {
                        string predicted = insts.classAttribute().value(j);
                        string distr     = distrs[j].ToString("#0.000");
                        ret.Add(predicted + "," + distr);
                    }
                }
                return(ret);
            }
            catch
            {
                return(ret);
            }
        }
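A minimal usage sketch for the Classify method above; the model and test-file paths are hypothetical, and the model is assumed to have been trained on data compatible with the test ARFF file. Each returned entry is a "label,probability" pair.

        List<string> results = Classify("D:\\models\\j48.model", "D:\\data\\test.arff");
        foreach (string entry in results)
        {
            // Each entry has the form "classLabel,probability", e.g. "yes,0.873".
            string[] parts = entry.Split(',');
            System.Console.WriteLine("class " + parts[0] + " -> p = " + parts[1]);
        }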
Example No. 2
        public void Test()
        {
            weka.core.Instances insts = new weka.core.Instances(new java.io.FileReader("D:\\android_analysis\\attributes.arff"));
            insts.setClassIndex(insts.numAttributes() - 1);

            weka.classifiers.Classifier cl = new weka.classifiers.trees.J48();

            // Randomize the instance order before splitting; the classifier is built on the
            // training partition below, so building it on the full set here would be redundant.
            weka.filters.Filter myRandom = new weka.filters.unsupervised.instance.Randomize();
            myRandom.setInputFormat(insts);
            insts = weka.filters.Filter.useFilter(insts, myRandom);

            int trainSize = (int)(insts.numInstances() * 0.66);
            int testSize  = insts.numInstances() - trainSize;

            weka.core.Instances train = new weka.core.Instances(insts, 0, trainSize);

            cl.buildClassifier(train);
            for (int i = trainSize; i < insts.numInstances(); i++)
            {
                weka.core.Instance currentInst    = insts.instance(i);
                double             predictedClass = cl.classifyInstance(currentInst);
                double[]           distrs         = cl.distributionForInstance(currentInst);
                string             actual         = insts.classAttribute().value((int)currentInst.classValue());
                string             predicted      = insts.classAttribute().value((int)predictedClass);
                System.Console.WriteLine("ID: " + (i + 1) + ", " + actual + " --> " + predicted);
            }
        }
Example No. 3
    public static void Main(String[] args)
    {
        try
        {
            // Load the model
            java.io.ObjectInputStream   stream       = new java.io.ObjectInputStream(new java.io.FileInputStream("iris_j48.model"));
            weka.classifiers.Classifier qhClassifier = (weka.classifiers.Classifier)stream.readObject();
            stream.close();

            // This model was trained on 66% of instances from the iris dataset. Test the model on remaining 34% instances.
            weka.core.Instances insts = new weka.core.Instances(new java.io.FileReader("iris.arff"));
            insts.setClassIndex(insts.numAttributes() - 1);
            int percentSplit = 66;
            int trainSize    = insts.numInstances() * percentSplit / 100;
            int testSize     = insts.numInstances() - trainSize;
            int numCorrect   = 0;
            for (int i = trainSize; i < insts.numInstances(); i++)
            {
                weka.core.Instance currentInst    = insts.instance(i);
                double             predictedClass = qhClassifier.classifyInstance(currentInst);
                if (predictedClass == insts.instance(i).classValue())
                {
                    numCorrect++;
                }
            }
            Console.WriteLine(numCorrect + " out of " + testSize + " correct (" + (double)((double)numCorrect / (double)testSize * 100.0) + "%)");
        }
        catch (java.lang.Exception e)
        {
            e.printStackTrace();
        }
    }
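For reference, a model file such as iris_j48.model can be produced with the mirror image of the loading code above. A sketch, assuming the same IKVM-compiled Weka bindings and a local iris.arff (the comment above notes the original model was trained on a 66% split; this sketch trains on the full file for brevity):

        weka.core.Instances insts = new weka.core.Instances(new java.io.FileReader("iris.arff"));
        insts.setClassIndex(insts.numAttributes() - 1);

        weka.classifiers.Classifier cl = new weka.classifiers.trees.J48();
        cl.buildClassifier(insts);

        // Serialize the trained classifier so it can later be read back with ObjectInputStream.
        java.io.ObjectOutputStream oos = new java.io.ObjectOutputStream(new java.io.FileOutputStream("iris_j48.model"));
        oos.writeObject(cl);
        oos.flush();
        oos.close();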
Example No. 4
    public void UpdateClassifierFromInstanceString(String line)
    {
        try {
            String[] lineSplit = line.Split(',');
            double[] fullData  = new double[lineSplit.Length - 1];
            for (int j = 0; j < lineSplit.Length - 1; j++)
            {
                fullData[j] = double.Parse(lineSplit[j]);
            }

            // The last token carries the instance weight wrapped in extra characters
            // (e.g. braces); strip them before parsing.
            String weightS = lineSplit[lineSplit.Length - 1];
            //Debug.Log(weightS);
            weightS = weightS.Remove(weightS.Length - 1, 1);
            if (weightS[weightS.Length - 1] == '}')
            {
                weightS = weightS.Remove(weightS.Length - 1, 1);
            }
            weightS = weightS.Remove(0, 1);
            //Debug.Log(weightS);
            double weight = double.Parse(weightS);

            weka.core.Instance newInstance = new weka.core.Instance(weight, fullData);
            playerData.add(newInstance);
            classifier.buildClassifier(playerData);
        }
        catch (java.lang.Exception ex)
        {
            Debug.LogError(ex.getMessage());
        }
    }
Example No. 5
        public void test_ToStrings_gets_the_string_representation_of_the_instance()
        {
            Runtime rt = TestingHelpers.LoadSmallRuntime <TitanicDataRow2>("titanic_train.csv", 0, 3);

            weka.core.Instance instance = rt[0].Impl;
            System.Collections.Generic.IEnumerable <string> strs = instance.ToStrings();
            Assert.AreEqual(new[] { "0", "3", "Braund, Mr. Owen Harris", "male", "22", "1", "0", "A/5 21171", "7.25", "?", "S" }, strs);
        }
Example No. 6
 public override double classifyInstance(Instance instance)
 {
     double x = instance.value(idxAttribute);
     double p = GetP(x);
     if (p < epsilon)
         return 1;
     else
         return 0;
 }
Example No. 7
        //Only 1 output: last instance
        static public string do_Classification_bySerialClassfier_1out_standAlone
            (SerializedClassifier serialClassifier, weka.core.Instances instances, int colClass)
        {
            instances.setClassIndex(colClass);
            weka.core.Instance each           = instances.instance(instances.numInstances() - 1);
            double             predictedClass = serialClassifier.classifyInstance(each);

            return(instances.classAttribute().value((int)predictedClass));
        }
Example No. 8
        public override double classifyInstance(weka.core.Instance instance)
        {
            var d = base.classifyInstance(instance);

            if (d == 3)
            {
                return(2);
            }
            return(d);
        }
Example No. 9
        protected override string instanceToLibsvm(weka.core.Instance inst)
        {
            //StringBuffer.__<clinit>();
            //StringBuffer buffer = new StringBuffer(new StringBuilder().append("").append(inst.classValue()).toString());
            StringBuffer buffer = new StringBuffer(new StringBuilder().append("").append(inst.classValue() - 1).toString());

            for (int i = 0; i < inst.numAttributes(); i++)
            {
                if ((i != inst.classIndex()) && (inst.value(i) != 0f))
                {
                    buffer.append(new StringBuilder().append(" ").append((int)(i + 1)).append(":").append(inst.value(i)).toString());
                }
            }
            return(buffer.toString());
        }
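For reference, the method above emits one libsvm-format line per instance: the class value shifted down by one, followed by space-separated index:value pairs (1-based attribute indices) for every non-zero, non-class attribute. A line of the hypothetical form:

        1 1:5.1 2:3.5 4:0.2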
Example No. 10
        public static void CalculateSuccessForAnn(weka.core.Instances originalInsts)
        {
            try
            {
                var form = Form.ActiveForm as Form1;

                form.successPrcAnn.Text = "Training...";
                form.successRtAnn.Text  = "../" + testSize;

                weka.core.Instances insts = originalInsts;

                // Pre-process
                insts = ConvertNominalToNumeric(insts);
                insts = Normalize(insts);

                // Classify
                weka.classifiers.Classifier cl    = new weka.classifiers.functions.MultilayerPerceptron();
                weka.core.Instances         train = new weka.core.Instances(insts, 0, trainSize);
                cl.buildClassifier(train);

                int    numCorrect = 0;
                double percentage = 0;
                for (int i = trainSize; i < insts.numInstances(); i++)
                {
                    weka.core.Instance currentInst    = insts.instance(i);
                    double             predictedClass = cl.classifyInstance(currentInst);
                    if (predictedClass == insts.instance(i).classValue())
                    {
                        numCorrect++;
                    }

                    percentage              = (double)numCorrect / (double)testSize * 100.0;
                    form.successRtAnn.Text  = numCorrect + "/" + testSize;
                    form.successPrcAnn.Text = String.Format("{0:0.00}", percentage) + "%";
                }
                succesRates.Add(Classifier.ANN, percentage);
                classifiers.Add(Classifier.ANN, cl);
            }
            catch (java.lang.Exception ex)
            {
                ex.printStackTrace();
                MessageBox.Show(ex.ToString(), "Error for Neural Network", MessageBoxButtons.OK, MessageBoxIcon.Error);
            }
            catch (Exception)
            {
                MessageBox.Show("Error for  Neural Network", "Error for  Neural Network", MessageBoxButtons.OK, MessageBoxIcon.Error);
            }
        }
Example No. 11
        public double GetVolume(weka.core.Instance instance)
        {
            //double v = m_counts[2] * m_tp - m_counts[0] * m_sl + (m_tp - 1.5 * m_sl) * m_counts[1] / 5;
            //if (v < 0)
            //    return 0;
            //v /= 1000;
            double sum = (m_counts[0] + m_counts[1] + m_counts[2]);

            if (sum == 0)
            {
                return(0);
            }
            double v = m_counts[2] / sum;

            return(Math.Round(v, 2));
        }
Example No. 12
        //Knn
        public static double Knn(weka.core.Instances insts)
        {
            try
            {
                insts.setClassIndex(insts.numAttributes() - 1);

                Knncl = new weka.classifiers.lazy.IBk();

                weka.filters.Filter myDummy = new weka.filters.unsupervised.attribute.NominalToBinary();
                myDummy.setInputFormat(insts);
                insts = weka.filters.Filter.useFilter(insts, myDummy);

                weka.filters.Filter myNormalize = new weka.filters.unsupervised.instance.Normalize();
                myNormalize.setInputFormat(insts);
                insts = weka.filters.Filter.useFilter(insts, myNormalize);

                weka.filters.Filter myRandom = new weka.filters.unsupervised.instance.Randomize();
                myRandom.setInputFormat(insts);
                insts = weka.filters.Filter.useFilter(insts, myRandom);

                int trainSize             = insts.numInstances() * percentSplit / 100;
                int testSize              = insts.numInstances() - trainSize;
                weka.core.Instances train = new weka.core.Instances(insts, 0, trainSize);

                Knncl.buildClassifier(train);


                int numCorrect = 0;
                for (int i = trainSize; i < insts.numInstances(); i++)
                {
                    weka.core.Instance currentInst    = insts.instance(i);
                    double             predictedClass = Knncl.classifyInstance(currentInst);
                    if (predictedClass == insts.instance(i).classValue())
                    {
                        numCorrect++;
                    }
                }
                return((double)numCorrect / (double)testSize * 100.0);
            }
            catch (java.lang.Exception ex)
            {
                ex.printStackTrace();
                return(0);
            }
        }
Example No. 13
        public void Test2()
        {
            java.io.ObjectInputStream   ois = new java.io.ObjectInputStream(new java.io.FileInputStream("D:\\android_analysis\\som_model.model"));
            weka.classifiers.Classifier cl  = (weka.classifiers.Classifier)ois.readObject();
            ois.close();

            weka.core.Instances insts = new weka.core.Instances(new java.io.FileReader("D:\\android_analysis\\test1.arff"));
            insts.setClassIndex(insts.numAttributes() - 1);
            for (int i = 0; i < insts.numInstances(); i++)
            {
                weka.core.Instance currentInst    = insts.instance(i);
                double             predictedClass = cl.classifyInstance(currentInst);
                double[]           distrs         = cl.distributionForInstance(currentInst);
                //string actual = insts.classAttribute().value((int)currentInst.classValue());
                //string predicted = insts.classAttribute().value((int)predictedClass);
                // System.Console.WriteLine("ID: " + (i + 1) + ", " + predicted);
            }
        }
Example No. 14
        public void test_binarisation_in_instances_builder()
        {
            TestingRow[] rows = new[] {
                new TestingRow {
                    CLASS = 1, ATT_1 = 111, ATT_2 = 222, ATT_3 = 333, ATT_4 = 444, ATT_5 = 555, ATT_6 = 666, ATT_7 = 777, ATT_8 = 888
                },
                new TestingRow {
                    CLASS = 0, ATT_1 = 1112, ATT_2 = 2222, ATT_3 = 3332, ATT_4 = 4442, ATT_5 = 5552, ATT_6 = 6662, ATT_7 = 7772, ATT_8 = 8882
                }
            };
            weka.core.Instances instances = new InstancesBuilder <TestingRow>(rows, 0).Build();

            Assert.AreEqual(25, instances.numAttributes());

            weka.core.Instance i1 = instances.instance(0);
            weka.core.Instance i2 = instances.instance(1);

            // These are the standard properties
            Assert.AreEqual(1.0, i1.value(0)); Assert.AreEqual(1.0, i1.classValue());
            Assert.AreEqual(0.0, i2.value(0)); Assert.AreEqual(0.0, i2.classValue());
            Assert.AreEqual(0.0, i1.value(1)); Assert.AreEqual("111", i1.stringValue(1));
            Assert.AreEqual(1.0, i2.value(1)); Assert.AreEqual("1112", i2.stringValue(1));
            Assert.AreEqual(0.0, i1.value(2)); Assert.AreEqual("222", i1.stringValue(2));
            Assert.AreEqual(1.0, i2.value(2)); Assert.AreEqual("2222", i2.stringValue(2));
            Assert.AreEqual(0.0, i1.value(3)); Assert.AreEqual("333", i1.stringValue(3));
            Assert.AreEqual(1.0, i2.value(3)); Assert.AreEqual("3332", i2.stringValue(3));
            Assert.AreEqual(0.0, i1.value(4)); Assert.AreEqual("444", i1.stringValue(4));
            Assert.AreEqual(1.0, i2.value(4)); Assert.AreEqual("4442", i2.stringValue(4));
            Assert.AreEqual(0.0, i1.value(5)); Assert.AreEqual("555", i1.stringValue(5));
            Assert.AreEqual(1.0, i2.value(5)); Assert.AreEqual("5552", i2.stringValue(5));
            Assert.AreEqual(0.0, i1.value(6)); Assert.AreEqual("666", i1.stringValue(6));
            Assert.AreEqual(1.0, i2.value(6)); Assert.AreEqual("6662", i2.stringValue(6));
            Assert.AreEqual(0.0, i1.value(7)); Assert.AreEqual("777", i1.stringValue(7));
            Assert.AreEqual(1.0, i2.value(7)); Assert.AreEqual("7772", i2.stringValue(7));
            Assert.AreEqual(0.0, i1.value(8)); Assert.AreEqual("888", i1.stringValue(8));
            Assert.AreEqual(1.0, i2.value(8)); Assert.AreEqual("8882", i2.stringValue(8));

            // These are the new binarized
            for (int i = 9; i < 25; i += 2)
            {
                Assert.AreEqual(1.0, i1.value(i)); Assert.AreEqual(0.0, i2.value(i));
                Assert.AreEqual(0.0, i1.value(i + 1)); Assert.AreEqual(1.0, i2.value(i + 1));
            }
        }
Example No. 15
        public static string classifyTest()
        {
            try
            {
                String result = "";

                weka.core.Instances insts = new weka.core.Instances(new java.io.FileReader("C:\\Program Files\\Weka-3-7\\data\\iris.arff"));
                insts.setClassIndex(insts.numAttributes() - 1);

                weka.classifiers.Classifier cl = new weka.classifiers.trees.J48();
                //  Console.WriteLine("Performing " + percentSplit + "% split evaluation.");
                result += "Performing " + percentSplit + "% split evaluation.\n";
                //randomize the order of the instances in the dataset.
                weka.filters.Filter myRandom = new weka.filters.unsupervised.instance.Randomize();
                myRandom.setInputFormat(insts);
                insts = weka.filters.Filter.useFilter(insts, myRandom);

                int trainSize             = insts.numInstances() * percentSplit / 100;
                int testSize              = insts.numInstances() - trainSize;
                weka.core.Instances train = new weka.core.Instances(insts, 0, trainSize);

                cl.buildClassifier(train);
                int numCorrect = 0;
                for (int i = trainSize; i < insts.numInstances(); i++)
                {
                    weka.core.Instance currentInst    = insts.instance(i);
                    double             predictedClass = cl.classifyInstance(currentInst);
                    if (predictedClass == insts.instance(i).classValue())
                    {
                        numCorrect++;
                    }
                }
                //Console.WriteLine(numCorrect + " out of " + testSize + " correct (" + (double)((double)numCorrect / (double)testSize * 100.0) + "%)");
                result += (numCorrect + " out of " + testSize + " correct (" + (double)((double)numCorrect / (double)testSize * 100.0) + "%)");

                return(result);
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.Message);
                return(ex.Message);
            }
        }
Example No. 16
    public void UpdateClassifier(double[] newMapData, int lastRating)
    {
        try
        {
            // Copy data to a new array and add the rating (the class of this instance)
            double [] fullData = new double[newMapData.Length + 1];
            for (int i = 0; i < newMapData.Length; i++)
            {
                fullData[i] = newMapData[i];
            }
            //*********fullData[fullData.Length-1] = (double) lastRating;
            fullData[fullData.Length - 1] = (double)((lastRating - 1) / 3);
            //Debug.LogWarning(fullData[fullData.Length-1]);
            double weight = 0;
            if (lastRating == 1 || lastRating == 6)
            {
                weight = 2;
            }
            else if (lastRating == 2 || lastRating == 5)
            {
                weight = 1;
            }
            else
            {
                weight = 0.5;
            }

            // Naive Bayes defaults all data to weight of 1, do same for this instance
            //*******weka.core.Instance newInstance = new weka.core.Instance(1,fullData);
            weka.core.Instance newInstance = new weka.core.Instance(weight, fullData);
            playerData.add(newInstance);

            // This version of Naive Bayes is not updateable, so just rebuild the classifier
            // Updateable version has slightly lower accuracy
            classifier.buildClassifier(playerData);
        }
        catch (java.lang.Exception ex)
        {
            Debug.LogError(ex.getMessage());
        }
    }
Example No. 17
        /// <summary>
        /// Uses the classifier to classify an instance (from its featureValues).
        /// </summary>
        /// <param name="featureValues">An array of doubles that describe the instance.</param>
        /// <returns>The string name of the classification of the instance.</returns>
        public string classify(double[] featureValues)
        {
            //if (!classifierBuilt) { _classifier.buildClassifier(_dataSet); classifierBuilt = true; }

            weka.core.Instance inst = new weka.core.Instance(1, featureValues);
            inst.setDataset(_dataSet);

            double result = _classifier.classifyInstance(inst);

            weka.core.Attribute attribute = _dataSet.attribute(_dataSet.numAttributes() - 1);
            string resultName             = attribute.value((int)result);

            // Get rid of this line once ARFF files are rewritten
            if (resultName == "Label")
            {
                resultName = "Text";
            }

            //Console.WriteLine(resultName);
            return(resultName);
        }
Example No. 18
 public override double classifyInstance(Instance instance)
 {
     int r = 2;
     if (m_prop <= 0)
     {
         r = 2;
     }
     else
     {
         if (x == y)
         {
             if (w > z)
                 r = lastR;
             else
                 r = 2;
         }
         else if (x > y)
         {
             return 0;
         }
         else if (y > x)
         {
             return 1;
         }
         else
         {
             return 2;
         }
     }
     //lastR = r;
     return r;
     //return (new Random()).NextDouble() > 0.5 ? 1 : 0;
     //double v = m_innerClassifier.classifyInstance(instance);
     //if (v == 0)
     //    return 0;
     //else if (v == 1)
     //    return 1;
     //else
     //    return 2;
 }
Example No. 19
        private void testButton_Click(object sender, EventArgs e)
        {
            var form = Form.ActiveForm as Form1;

            if (readyToTest)
            {
                weka.classifiers.Classifier cl   = classifiers[highestSuccessRate.Key];
                weka.core.Instance          inst = new weka.core.Instance(insts.numAttributes() - 1);
                inst.setDataset(insts);
                for (int i = 0; i < inputObjects.Count; i++)
                {
                    if (inputObjects[i].numeric)
                    {
                        inst.setValue(i, Decimal.ToDouble(inputObjects[i].num.Value));
                    }
                    else
                    {
                        inst.setValue(i, inputObjects[i].nom.SelectedItem.ToString());
                    }
                }


                try
                {
                    string[] values      = insts.attribute(insts.numAttributes() - 1).toString().Split('{', '}')[1].Split(',');
                    double   classOfData = cl.classifyInstance(inst);
                    int      idx         = Convert.ToInt32(classOfData);
                    form.testResult.Text = values[idx];
                }
                catch (Exception ex)
                {
                    MessageBox.Show(ex.Message, "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
                }
            }
            else
            {
                MessageBox.Show("Program is not ready to test, probably needs to process data first.", "Not ready", MessageBoxButtons.OK, MessageBoxIcon.Information);
            }
        }
Example No. 20
            public int InitializeClassifier(string[] atributes, string[] gestures, string classAttribute, string modelLocation)
            {
                java.io.ObjectInputStream ois = new java.io.ObjectInputStream(new java.io.FileInputStream(modelLocation));

                m_cl = (weka.classifiers.Classifier)ois.readObject();
                ois.close();

                //Declare the feature vector
                weka.core.FastVector fvWekaFeatureVector = new weka.core.FastVector(atributes.Length + 1);
                for (int i = 0; i < atributes.Length; i++)
                {
                    weka.core.Attribute aux = new weka.core.Attribute(atributes[i]);
                    fvWekaFeatureVector.addElement(aux);
                }


                //Declare the class weka.core.Attribute along with its values
                weka.core.FastVector fvClassValues = new weka.core.FastVector(gestures.Length);
                for (int i = 0; i < gestures.Length; i++)
                {
                    // each gesture name becomes one nominal value of the class attribute
                    fvClassValues.addElement(gestures[i]);
                }
                //fvClassValues.addElement("yes");
                //fvClassValues.addElement("no");

                weka.core.Attribute ClassAttribute = new weka.core.Attribute(classAttribute, fvClassValues);

                fvWekaFeatureVector.addElement(ClassAttribute);

                dataSet = new weka.core.Instances("TestRel", fvWekaFeatureVector, 10);
                dataSet.setClassIndex(atributes.Length);

                testInstance = new weka.core.Instance(atributes.Length + 1);

                return(1);
            }
Example No. 21
		/// <summary> Input an instance for filtering. Ordinarily the instance is
		/// processed and made available for output immediately. Some filters
		/// require all instances be read before producing output, in which
		/// case output instances should be collected after calling
		/// batchFinished(). If the input marks the start of a new batch, the
		/// output queue is cleared. This default implementation assumes all
		/// instance conversion will occur when batchFinished() is called.
		/// 
		/// </summary>
		/// <param name="instance">the input instance
		/// </param>
		/// <returns> true if the filtered instance may now be
		/// collected with output().
		/// </returns>
		/// <exception cref="NullPointerException">if the input format has not been
		/// defined.
		/// </exception>
		/// <exception cref="Exception">if the input instance was not of the correct 
		/// format or if there was a problem with the filtering.  
		/// </exception>
		public virtual bool input(Instance instance)
		{
			
			if (m_InputFormat == null)
			{
				throw new System.NullReferenceException("No input instance format defined");
			}
			if (m_NewBatch)
			{
				m_OutputQueue = new Queue();
				m_NewBatch = false;
			}
			bufferInput(instance);
			return false;
		}
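A sketch of how a caller typically drives this input()/batchFinished()/output() protocol, assuming a concrete filter subclass such as ReplaceMissingValues, an already-loaded Instances object, and that output() returns null once the output queue is exhausted (the static Filter.useFilter helper used in the other examples wraps the same sequence):

		weka.filters.Filter filter = new weka.filters.unsupervised.attribute.ReplaceMissingValues();
		filter.setInputFormat(insts);                  // define the input structure
		for (int i = 0; i < insts.numInstances(); i++)
		{
			filter.input(insts.instance(i));           // may be buffered until batchFinished()
		}
		filter.batchFinished();                        // end of batch: buffered instances are converted
		weka.core.Instance filtered;
		while ((filtered = filter.output()) != null)   // collect the filtered instances
		{
			System.Console.WriteLine(filtered.toString());
		}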
Example No. 22
		/// <summary> Takes string values referenced by an Instance and copies them from a
		/// source dataset to a destination dataset. The instance references are
		/// updated to be valid for the destination dataset. The instance may have the 
		/// structure (i.e. number and attribute position) of either dataset (this
		/// affects where references are obtained from). Only works if the number
		/// of string attributes is the same in both indices (implicitly these string
		/// attributes should be semantically same but just with shifted positions).
		/// 
		/// </summary>
		/// <param name="instance">the instance containing references to strings in the source
		/// dataset that will have references updated to be valid for the destination
		/// dataset.
		/// </param>
		/// <param name="instSrcCompat">true if the instance structure is the same as the
		/// source, or false if it is the same as the destination (i.e. which of the
		/// string attribute indices contains the correct locations for this instance).
		/// </param>
		/// <param name="srcDataset">the dataset for which the current instance string
		/// references are valid (after any position mapping if needed)
		/// </param>
		/// <param name="srcStrAtts">an array containing the indices of string attributes
		/// in the source dataset.
		/// </param>
		/// <param name="destDataset">the dataset for which the current instance string
		/// references need to be inserted (after any position mapping if needed)
		/// </param>
		/// <param name="destStrAtts">an array containing the indices of string attributes
		/// in the destination dataset.
		/// </param>
		protected internal virtual void  copyStringValues(Instance instance, bool instSrcCompat, Instances srcDataset, int[] srcStrAtts, Instances destDataset, int[] destStrAtts)
		{
			if (srcDataset == destDataset)
			{
				return ;
			}
			if (srcStrAtts.Length != destStrAtts.Length)
			{
				throw new System.ArgumentException("Src and Dest string indices differ in length!!");
			}
			for (int i = 0; i < srcStrAtts.Length; i++)
			{
				int instIndex = instSrcCompat?srcStrAtts[i]:destStrAtts[i];
				Attribute src = srcDataset.attribute(srcStrAtts[i]);
				Attribute dest = destDataset.attribute(destStrAtts[i]);
				if (!instance.isMissing(instIndex))
				{
					//System.err.println(instance.value(srcIndex) 
					//                   + " " + src.numValues()
					//                   + " " + dest.numValues());
					//UPGRADE_WARNING: Data types in Visual C# might be different.  Verify the accuracy of narrowing conversions. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1042'"
					int valIndex = dest.addStringValue(src, (int) instance.value_Renamed(instIndex));
					// setValue here shouldn't be too slow here unless your dataset has
					// squillions of string attributes
					instance.setValue(instIndex, (double) valIndex);
				}
			}
		}
Example No. 23
 public double[] ClassifyNewData(double[] newData)
 {
     weka.core.Instance newInstance = new weka.core.Instance(1,newData);
     newInstance.setDataset(playerData);
     return classifier.distributionForInstance(newInstance);
 }
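A short sketch of consuming the class distribution returned by ClassifyNewData above: pick the index with the highest probability. It assumes newMapData is a feature array matching playerData's attributes and that classifier has already been built, as in the surrounding examples.

     double[] distribution = ClassifyNewData(newMapData);   // one probability per class value
     int best = 0;
     for (int k = 1; k < distribution.Length; k++)
     {
         if (distribution[k] > distribution[best])
         {
             best = k;
         }
     }
     Debug.Log("Predicted class index: " + best + " (p = " + distribution[best] + ")");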
Example No. 24
    public void UpdateClassifier(double[] newMapData, int lastRating)
    {
        try
        {
            // Copy data to a new array and add the rating (the class of this instance)
            double [] fullData = new double[newMapData.Length+1];
            for (int i = 0; i < newMapData.Length; i++)
                fullData[i] = newMapData[i];
            //*********fullData[fullData.Length-1] = (double) lastRating;
            fullData[fullData.Length-1] = (double)((lastRating-1)/3);
            //Debug.LogWarning(fullData[fullData.Length-1]);
            double weight = 0;
            if (lastRating == 1 || lastRating == 6)
                weight = 2;
            else if (lastRating == 2 || lastRating == 5)
                weight = 1;
            else
                weight = 0.5;

            // Naive Bayes defaults all data to weight of 1, do same for this instance
            //*******weka.core.Instance newInstance = new weka.core.Instance(1,fullData);
            weka.core.Instance newInstance = new weka.core.Instance(weight,fullData);
            playerData.add(newInstance);

            // This version of Naive Bayes is not updateable, so just rebuild the classifier
            // Updateable version has slightly lower accuracy
            classifier.buildClassifier(playerData);
        }
        catch (java.lang.Exception ex)
        {
            Debug.LogError(ex.getMessage());
        }
    }
Example No. 25
        private void button1_Click(object sender, EventArgs e)
        {
            OpenFileDialog file = new OpenFileDialog();

            if (file.ShowDialog() == DialogResult.OK)
            {
                string filename = file.FileName;
                string filee    = Path.GetFileName(filename);
                bool   attributeType;
                string attributeName      = " ";
                int    numAttributeValue  = 0;
                string attributeValueName = " ";

                textBox1.Text = filee + " chosen successfully!";

                ///////Decision Tree
                weka.core.Instances insts = new weka.core.Instances(new java.io.FileReader(filename));


                insts.setClassIndex(insts.numAttributes() - 1);

                //find nominal or numeric attributes and create dropbox or textbox
                int numofAttributes = insts.numAttributes() - 1;
                for (int i = 0; i < numofAttributes; i++)
                {
                    attributeType = insts.attribute(i).isNumeric();
                    attributeName = insts.attribute(i).name();
                    dataGridView1.Rows.Add(attributeName);
                    if (attributeType == true)
                    {
                        // numeric attribute: leave the cell as a plain text box
                    }
                    else
                    {
                        numAttributeValue = insts.attribute(i).numValues();
                        string[] name = new string[numAttributeValue];
                        for (int j = 0; j < numAttributeValue; j++)
                        {
                            attributeValueName = insts.attribute(i).value(j);
                            name[j]           += attributeValueName;
                        }
                        DataGridViewComboBoxCell combo = new DataGridViewComboBoxCell();
                        combo.DataSource = name.ToList();
                        dataGridView1.Rows[i].Cells[1] = combo;
                    }
                }

                cl = new weka.classifiers.trees.J48();

                textBox2.Text = "Performing " + percentSplit + "% split evaluation.";

                //filling missing values
                weka.filters.Filter missingval = new weka.filters.unsupervised.attribute.ReplaceMissingValues();
                missingval.setInputFormat(insts);
                insts = weka.filters.Filter.useFilter(insts, missingval);

                weka.filters.Filter myNormalized = new weka.filters.unsupervised.instance.Normalize();
                myNormalized.setInputFormat(insts);
                insts = weka.filters.Filter.useFilter(insts, myNormalized);


                //randomize the order of the instances in the dataset.
                weka.filters.Filter myRandom = new weka.filters.unsupervised.instance.Randomize();
                myRandom.setInputFormat(insts);
                insts = weka.filters.Filter.useFilter(insts, myRandom);

                int trainSize             = insts.numInstances() * percentSplit / 100;
                int testSize              = insts.numInstances() - trainSize;
                weka.core.Instances train = new weka.core.Instances(insts, 0, trainSize);

                cl.buildClassifier(train);

                string str = cl.toString();

                int numCorrect = 0;
                for (int i = trainSize; i < insts.numInstances(); i++)
                {
                    weka.core.Instance currentInst    = insts.instance(i);
                    double             predictedClass = cl.classifyInstance(currentInst);
                    if (predictedClass == insts.instance(i).classValue())
                    {
                        numCorrect++;
                    }
                }
                textBox3.Text = numCorrect + " out of " + testSize + " correct (" +
                                (double)((double)numCorrect / (double)testSize * 100.0) + "%)";



                //////////Naive Bayes

                //read the data file
                weka.core.Instances insts2 = new weka.core.Instances(new java.io.FileReader(filename));
                insts2.setClassIndex(insts2.numAttributes() - 1);

                //naive bayes
                cl2 = new weka.classifiers.bayes.NaiveBayes();


                //filling missing values
                weka.filters.Filter missingval2 = new weka.filters.unsupervised.attribute.ReplaceMissingValues();
                missingval2.setInputFormat(insts2);
                insts2 = weka.filters.Filter.useFilter(insts2, missingval2);

                //for naive bayes
                weka.filters.Filter discrete2 = new weka.filters.unsupervised.attribute.Discretize();
                discrete2.setInputFormat(insts2);
                insts2 = weka.filters.Filter.useFilter(insts2, discrete2);

                //randomize the order of the instances in the dataset (common step)
                weka.filters.Filter myRandom2 = new weka.filters.unsupervised.instance.Randomize();
                myRandom2.setInputFormat(insts2);
                insts2 = weka.filters.Filter.useFilter(insts2, myRandom2);

                //common split step
                int trainSize2             = insts2.numInstances() * percentSplit / 100;
                int testSize2              = insts2.numInstances() - trainSize2;
                weka.core.Instances train2 = new weka.core.Instances(insts2, 0, trainSize2);

                cl2.buildClassifier(train2);

                string str2 = cl2.toString();

                int numCorrect2 = 0;
                for (int i = trainSize2; i < insts2.numInstances(); i++)
                {
                    weka.core.Instance currentInst2    = insts2.instance(i);
                    double             predictedClass2 = cl2.classifyInstance(currentInst2);
                    if (predictedClass2 == insts2.instance(i).classValue())
                    {
                        numCorrect2++;
                    }
                }
                textBox4.Text = numCorrect2 + " out of " + testSize2 + " correct (" +
                                (double)((double)numCorrect2 / (double)testSize2 * 100.0) + "%)";


                /////////K-Nearest Neighbour

                //read the data file
                weka.core.Instances insts3 = new weka.core.Instances(new java.io.FileReader(filename));
                insts3.setClassIndex(insts3.numAttributes() - 1);

                cl3 = new weka.classifiers.lazy.IBk();


                //filling missing values
                weka.filters.Filter missingval3 = new weka.filters.unsupervised.attribute.ReplaceMissingValues();
                missingval3.setInputFormat(insts3);
                insts3 = weka.filters.Filter.useFilter(insts3, missingval3);

                //Convert to dummy attribute knn,svm,neural network
                weka.filters.Filter dummy3 = new weka.filters.unsupervised.attribute.NominalToBinary();
                dummy3.setInputFormat(insts3);
                insts3 = weka.filters.Filter.useFilter(insts3, dummy3);

                //normalize numeric attribute
                weka.filters.Filter myNormalized3 = new weka.filters.unsupervised.instance.Normalize();
                myNormalized3.setInputFormat(insts3);
                insts3 = weka.filters.Filter.useFilter(insts3, myNormalized3);

                //randomize the order of the instances in the dataset.
                weka.filters.Filter myRandom3 = new weka.filters.unsupervised.instance.Randomize();
                myRandom3.setInputFormat(insts3);
                insts3 = weka.filters.Filter.useFilter(insts3, myRandom3);

                int trainSize3             = insts3.numInstances() * percentSplit / 100;
                int testSize3              = insts3.numInstances() - trainSize3;
                weka.core.Instances train3 = new weka.core.Instances(insts3, 0, trainSize3);

                cl3.buildClassifier(train3);

                string str3 = cl3.toString();

                int numCorrect3 = 0;
                for (int i = trainSize3; i < insts3.numInstances(); i++)
                {
                    weka.core.Instance currentInst3    = insts3.instance(i);
                    double             predictedClass3 = cl3.classifyInstance(currentInst3);
                    if (predictedClass3 == insts3.instance(i).classValue())
                    {
                        numCorrect3++;
                    }
                }
                textBox5.Text = numCorrect3 + " out of " + testSize3 + " correct (" +
                                (double)((double)numCorrect3 / (double)testSize3 * 100.0) + "%)";

                //////////Artificial neural network
                //read the data file
                weka.core.Instances insts4 = new weka.core.Instances(new java.io.FileReader(filename));
                insts4.setClassIndex(insts4.numAttributes() - 1);

                cl4 = new weka.classifiers.functions.MultilayerPerceptron();


                //filling missing values
                weka.filters.Filter missingval4 = new weka.filters.unsupervised.attribute.ReplaceMissingValues();
                missingval4.setInputFormat(insts4);
                insts4 = weka.filters.Filter.useFilter(insts4, missingval4);

                //Convert to dummy attribute
                weka.filters.Filter dummy4 = new weka.filters.unsupervised.attribute.NominalToBinary();
                dummy4.setInputFormat(insts4);
                insts4 = weka.filters.Filter.useFilter(insts4, dummy4);

                //normalize numeric attribute
                weka.filters.Filter myNormalized4 = new weka.filters.unsupervised.instance.Normalize();
                myNormalized4.setInputFormat(insts4);
                insts4 = weka.filters.Filter.useFilter(insts4, myNormalized4);

                //randomize the order of the instances in the dataset.
                weka.filters.Filter myRandom4 = new weka.filters.unsupervised.instance.Randomize();
                myRandom4.setInputFormat(insts4);
                insts4 = weka.filters.Filter.useFilter(insts4, myRandom4);

                int trainSize4             = insts4.numInstances() * percentSplit / 100;
                int testSize4              = insts4.numInstances() - trainSize4;
                weka.core.Instances train4 = new weka.core.Instances(insts4, 0, trainSize4);

                cl4.buildClassifier(train4);

                string str4 = cl4.toString();

                int numCorrect4 = 0;
                for (int i = trainSize4; i < insts4.numInstances(); i++)
                {
                    weka.core.Instance currentInst4    = insts4.instance(i);
                    double             predictedClass4 = cl4.classifyInstance(currentInst4);
                    if (predictedClass4 == insts4.instance(i).classValue())
                    {
                        numCorrect4++;
                    }
                }

                textBox6.Text = numCorrect4 + " out of " + testSize4 + " correct (" +
                                (double)((double)numCorrect4 / (double)testSize4 * 100.0) + "%)";



                ///////Support Vector Machine
                // read the data file
                weka.core.Instances insts5 = new weka.core.Instances(new java.io.FileReader(filename));
                insts5.setClassIndex(insts5.numAttributes() - 1);

                cl5 = new weka.classifiers.functions.SMO();


                //filling missing values
                weka.filters.Filter missingval5 = new weka.filters.unsupervised.attribute.ReplaceMissingValues();
                missingval5.setInputFormat(insts5);
                insts5 = weka.filters.Filter.useFilter(insts5, missingval5);

                //Convert to dummy attribute
                weka.filters.Filter dummy5 = new weka.filters.unsupervised.attribute.NominalToBinary();
                dummy5.setInputFormat(insts5);
                insts5 = weka.filters.Filter.useFilter(insts5, dummy5);

                //normalize numeric attribute
                weka.filters.Filter myNormalized5 = new weka.filters.unsupervised.instance.Normalize();
                myNormalized5.setInputFormat(insts5);
                insts5 = weka.filters.Filter.useFilter(insts5, myNormalized5);

                //randomize the order of the instances in the dataset.
                weka.filters.Filter myRandom5 = new weka.filters.unsupervised.instance.Randomize();
                myRandom5.setInputFormat(insts5);
                insts5 = weka.filters.Filter.useFilter(insts5, myRandom5);

                int trainSize5             = insts5.numInstances() * percentSplit / 100;
                int testSize5              = insts5.numInstances() - trainSize5;
                weka.core.Instances train5 = new weka.core.Instances(insts5, 0, trainSize5);

                cl5.buildClassifier(train5);

                string str5 = cl5.toString();

                int numCorrect5 = 0;
                for (int i = trainSize5; i < insts5.numInstances(); i++)
                {
                    weka.core.Instance currentInst5    = insts5.instance(i);
                    double             predictedClass5 = cl5.classifyInstance(currentInst5);
                    if (predictedClass5 == insts5.instance(i).classValue())
                    {
                        numCorrect5++;
                    }
                }

                textBox7.Text = numCorrect5 + " out of " + testSize5 + " correct (" +
                                (double)((double)numCorrect5 / (double)testSize5 * 100.0) + "%)";



                string result1 = textBox3.Text;
                string output1 = result1.Split('(', ')')[1];
                output1 = output1.Remove(output1.Length - 1);
                double r1 = Convert.ToDouble(output1);

                string result2 = textBox4.Text;
                string output2 = result2.Split('(', ')')[1];
                output2 = output2.Remove(output2.Length - 1);
                double r2 = Convert.ToDouble(output2);

                string result3 = textBox5.Text;
                string output3 = result3.Split('(', ')')[1];
                output3 = output3.Remove(output3.Length - 1);
                double r3 = Convert.ToDouble(output3);

                string result4 = textBox6.Text;
                string output4 = result4.Split('(', ')')[1];
                output4 = output4.Remove(output4.Length - 1);
                double r4 = Convert.ToDouble(output4);

                string result5 = textBox7.Text;
                string output5 = result5.Split('(', ')')[1];
                output5 = output5.Remove(output5.Length - 1);
                double r5 = Convert.ToDouble(output5);


                double[] max_array = new double[] { r1, r2, r3, r4, r5 };

                double max = max_array.Max();
                if (r1 == max)
                {
                    textBox8.Text = "Best Algorithm is Decision Tree Algorithm";
                }
                else if (r2 == max)
                {
                    textBox8.Text = "Best Algorithm is Naive Bayes Algorithm";
                }
                else if (r3 == max)
                {
                    textBox8.Text = "Best Algorithm is K-Nearest Neighbour Algorithm";
                }
                else if (r4 == max)
                {
                    textBox8.Text = "Best Algorithm is Artificial Neural Network Algorithm";
                }
                else if (r5 == max)
                {
                    textBox8.Text = "Best Algorithm is Support Vector Machine Algorithm";
                }
            }
        }
Example No. 26
		/// <summary> Input an instance for filtering. Ordinarily the instance is processed
		/// and made available for output immediately. Some filters require all
		/// instances be read before producing output.
		/// 
		/// </summary>
		/// <param name="instance">the input instance
		/// </param>
		/// <returns> true if the filtered instance may now be
		/// collected with output().
		/// </returns>
		/// <exception cref="IllegalStateException">if no input format has been set.
		/// </exception>
		public override bool input(Instance instance)
		{
			
			if (getInputFormat() == null)
			{
				throw new System.SystemException("No input instance format defined");
			}
			if (m_NewBatch)
			{
				resetQueue();
				m_NewBatch = false;
			}
			if (instance.isMissing(m_AttIndex.Index))
			{
				if (!get_MatchMissingValues())
				{
					push((Instance) instance.copy());
					return true;
				}
				else
				{
					return false;
				}
			}
			if (Numeric)
			{
				if (!m_Values.Invert)
				{
					if (instance.value_Renamed(m_AttIndex.Index) < m_Value)
					{
						push((Instance) instance.copy());
						return true;
					}
				}
				else
				{
					if (instance.value_Renamed(m_AttIndex.Index) >= m_Value)
					{
						push((Instance) instance.copy());
						return true;
					}
				}
			}
			if (Nominal)
			{
				if (m_Values.isInRange((int) instance.value_Renamed(m_AttIndex.Index)))
				{
					Instance temp = (Instance) instance.copy();
					if (get_ModifyHeader())
					{
						temp.setValue(m_AttIndex.Index, m_NominalMapping[(int) instance.value_Renamed(m_AttIndex.Index)]);
					}
					push(temp);
					return true;
				}
			}
			return false;
		}
Example No. 27
        /// <summary> Classifies a given instance. </summary>
        /// <param name="instance"> the instance to be classified </param>
        /// <returns> index of the predicted class </returns>
        public override double classifyInstance(Instance instance)
        {
            if (randomGenerator == null)
            {
                randomGenerator = new System.Random((int)System.DateTime.Now.Ticks);
            }
            var rand = randomGenerator.NextDouble();

            for (int i = 0; i < m_normalCounts.Length; ++i)
            {
                if (rand < m_normalCounts[i])
                    return i == 1 ? 0 : i;
            }
            return 0;
        }
Example No. 28
        public override double classifyInstance(weka.core.Instance instance)
        {
            if (m_instances.numInstances() == 0)
            {
                return(2);
            }

            if (m_instances.numAttributes() != instance.numAttributes())
            {
                throw new AssertException("different attribute.");
            }
            int n = (instance.numAttributes() - 1) / 2;
            List <Tuple <int, int> > dist = new List <Tuple <int, int> >();

            for (int i = 0; i < m_instances.numInstances(); ++i)
            {
                int d1 = 0, d2 = 0;
                weka.core.Instance instanceI = m_instances.instance(i);
                for (int j = 0; j < n; ++j)
                {
                    //d += (int)((instanceI.value(j) - instance.value(j)) * (instanceI.value(j) - instance.value(j)));
                    if (instanceI.value(j) != instance.value(j))
                    {
                        if (instance.value(j) == 2 || instanceI.value(j) == 2)
                        {
                            d1++;
                        }
                        else
                        {
                            d1 += 4;
                        }
                    }
                }
                for (int j = n; j < 2 * n; ++j)
                {
                    //d += (int)((instanceI.value(j) - instance.value(j)) * (instanceI.value(j) - instance.value(j)));
                    if (instanceI.value(j) != instance.value(j))
                    {
                        if (instance.value(j) == 2 || instanceI.value(j) == 2)
                        {
                            d2++;
                        }
                        else
                        {
                            d2 += 4;
                        }
                    }
                }
                int c = (int)instanceI.classValue();
                //if (c == 0)
                //{
                //    if (d1 < n / 4 && d1 < d2)
                //    {
                //        dist.Add(new Tuple<int, int>(d1, c));
                //    }
                //}
                //else if (c == 1)
                //{
                //    if (d2 < n / 4 && d2 < d1)
                //    {
                //        dist.Add(new Tuple<int, int>(d2, c));
                //    }
                //}
                //else
                //{
                //    throw new AssertException("");
                //}
                dist.Add(new Tuple <int, int>(d1 + d2, c));
            }
            if (dist.Count == 0)
            {
                return(2);
            }

            dist.Sort(new Comparison <Tuple <int, int> >((x, y) =>
            {
                return(x.Item1.CompareTo(y.Item1));
            }));

            int sum = 0, count = 0;

            for (int i = 0; i < dist.Count; ++i)
            {
                if (dist[i].Item1 < n / 4 * 2 * 4)
                {
                    if (dist[i].Item2 != 2 && dist[i].Item2 != 3)
                    {
                        sum += dist[i].Item2;
                        count++;
                    }
                    else
                    {
                    }
                }
                else
                {
                    break;
                }
            }
            if (count == 0)
            {
                return(2);
            }
            if (count < m_instances.numInstances() / 30)
            {
                return(2);
            }
            return((int)Math.Round((double)sum / count));
        }
Example No. 29
 public override double[] distributionForInstance(Instance instance)
 {
     double n = x + y + z + w;
     return new double[] { (x + w / 2) / n, (y + w / 2) / n, z / n, 0 };
 }
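Note that the four returned entries sum to 1, since (x + w/2) + (y + w/2) + z = x + y + z + w = n; the result is therefore a valid probability distribution in which the fourth class always receives probability 0.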
Example No. 30
		/// <summary> Input an instance for filtering. Filter requires all
		/// training instances be read before producing output.
		/// 
		/// </summary>
		/// <param name="instance">the input instance
		/// </param>
		/// <returns> true if the filtered instance may now be
		/// collected with output().
		/// </returns>
		/// <exception cref="IllegalStateException">if no input structure has been defined
		/// </exception>
		public override bool input(Instance instance)
		{
			
			if (getInputFormat() == null)
			{
				throw new System.SystemException("No input instance format defined");
			}
			if (m_NewBatch)
			{
				resetQueue();
				m_NewBatch = false;
			}
			if (m_FirstBatchDone)
			{
				push(instance);
				return true;
			}
			else
			{
				bufferInput(instance);
				return false;
			}
		}
Example No. 31
        public override double classifyInstance(Instance instance)
        {
            if (m_mustValue.HasValue)
                return m_mustValue.Value;

            instance.setDataset(m_sampleInstances);
            instance.setClassValue(0);
            m_sampleInstances.clear();
            m_sampleInstances.add(instance);

            double[] d = classifyInstances(m_sampleInstances);
            return d[0];
        }
Example No. 32
        private void btnDiscover_Click(object sender, EventArgs e)
        {
            string type  = model.GetType().ToString();
            bool   flag  = false;
            bool   flag2 = false;

            //input checks
            if (nominal != null)
            {
                for (int i = 0; i < nominal.Length; i++)
                {
                    if (nominal[i].SelectedIndex == -1)
                    {
                        flag = true;
                        break;
                    }
                }
            }
            if (numeric != null)
            {
                for (int i = 0; i < numeric.Length; i++)
                {
                    if (String.IsNullOrEmpty(numeric[i].Text))
                    {
                        flag2 = true;
                        break;
                    }
                }
            }
            if (numAtt == numeric.Length && flag2 == true)
            {
                MessageBox.Show("Please select value!", "Error Message!");
            }
            else if (numAtt == nominal.Length && flag == true)
            {
                MessageBox.Show("Please select value!", "Error Message!");
            }
            else if ((nominal.Length + numeric.Length) == numAtt && (flag == true || flag2 == true))
            {
                MessageBox.Show("Please select value!", "Error Message!");
            }
            else
            {
                weka.core.Instance newIns = new weka.core.Instance(numAtt + 1);
                newIns.setDataset(insts);

                int i1 = 0, i2 = 0;
                for (int i = 0; i < numAtt; i++)
                {
                    //nominal
                    if (typeAtt[i])
                    {
                        newIns.setValue(i, nominal[i1].SelectedItem.ToString());
                        i1++;
                    }
                    //numeric
                    else
                    {
                        newIns.setValue(i, double.Parse(numeric[i2].Text));
                        i2++;
                    }
                }

                weka.core.Instances insts2 = new weka.core.Instances(insts);
                insts2.add(newIns);

                if (type == "weka.classifiers.bayes.NaiveBayes")
                {
                    weka.filters.Filter myDiscretize = new weka.filters.unsupervised.attribute.Discretize();
                    myDiscretize.setInputFormat(insts2);
                    insts2 = weka.filters.Filter.useFilter(insts2, myDiscretize);
                }

                else if (type == "weka.classifiers.functions.Logistic")
                {
                    weka.filters.Filter myDummy = new weka.filters.unsupervised.attribute.NominalToBinary();
                    myDummy.setInputFormat(insts2);
                    insts2 = weka.filters.Filter.useFilter(insts2, myDummy);

                    weka.filters.Filter myNormalize = new weka.filters.unsupervised.instance.Normalize();
                    myNormalize.setInputFormat(insts2);
                    insts2 = weka.filters.Filter.useFilter(insts2, myNormalize);
                }

                else if (type == "new weka.classifiers.lazy.IBk")
                {
                    weka.filters.Filter myDummy = new weka.filters.unsupervised.attribute.NominalToBinary();
                    myDummy.setInputFormat(insts2);
                    insts2 = weka.filters.Filter.useFilter(insts2, myDummy);

                    weka.filters.Filter myNormalize = new weka.filters.unsupervised.instance.Normalize();
                    myNormalize.setInputFormat(insts2);
                    insts2 = weka.filters.Filter.useFilter(insts2, myNormalize);
                }
                else if (type == "weka.classifiers.trees.J48")
                {
                }
                else if (type == "weka.classifiers.trees.RandomForest")
                {
                }
                else if (type == "weka.classifiers.trees.RandomTree")
                {
                }
                else if (type == "weka.classifiers.functions.MultilayerPerceptron")
                {
                    weka.filters.Filter myDummy = new weka.filters.unsupervised.attribute.NominalToBinary();
                    myDummy.setInputFormat(insts2);
                    insts2 = weka.filters.Filter.useFilter(insts2, myDummy);

                    weka.filters.Filter myNormalize = new weka.filters.unsupervised.instance.Normalize();
                    myNormalize.setInputFormat(insts2);
                    insts2 = weka.filters.Filter.useFilter(insts2, myNormalize);
                }
                else if (type == "weka.classifiers.functions.SMO")
                {
                    weka.filters.Filter myDummy = new weka.filters.unsupervised.attribute.NominalToBinary();
                    myDummy.setInputFormat(insts2);
                    insts2 = weka.filters.Filter.useFilter(insts2, myDummy);

                    weka.filters.Filter myNormalize = new weka.filters.unsupervised.instance.Normalize();
                    myNormalize.setInputFormat(insts2);
                    insts2 = weka.filters.Filter.useFilter(insts2, myNormalize);
                }

                double index = model.classifyInstance(insts2.lastInstance());
                //Model reading part (note: the classifier loaded here is not used below)
                weka.classifiers.Classifier cls = (weka.classifiers.Classifier)weka.core.SerializationHelper.read("models/mdl.model");
                lblResult2.Text = "Result= " + insts2.attribute(insts2.numAttributes() - 1).value(Convert.ToInt16(index));
            }
        }
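The chain of type checks above applies the same NominalToBinary + Normalize pair for several classifiers and Discretize for NaiveBayes; a compact sketch of the same idea (not the author's code, and assuming the translated Weka API used on this page) keyed on the classifier's runtime type:

        // Sketch: pick preprocessing filters from the classifier type and apply them in sequence.
        private static weka.core.Instances Preprocess(weka.core.Instances data, object model)
        {
            var filters = new System.Collections.Generic.List<weka.filters.Filter>();
            switch (model.GetType().ToString())
            {
                case "weka.classifiers.bayes.NaiveBayes":
                    filters.Add(new weka.filters.unsupervised.attribute.Discretize());
                    break;
                case "weka.classifiers.functions.Logistic":
                case "weka.classifiers.lazy.IBk":
                case "weka.classifiers.functions.MultilayerPerceptron":
                case "weka.classifiers.functions.SMO":
                    filters.Add(new weka.filters.unsupervised.attribute.NominalToBinary());
                    filters.Add(new weka.filters.unsupervised.instance.Normalize());
                    break;
                // J48, RandomForest and RandomTree get no preprocessing in the example above.
            }
            foreach (weka.filters.Filter f in filters)
            {
                f.setInputFormat(data);
                data = weka.filters.Filter.useFilter(data, f);
            }
            return data;
        }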
Esempio n. 33
0
 public override double classifyInstance(weka.core.Instance instance)
 {
     return(m_latestClass);
 }
Esempio n. 34
0
 public double GetVolume(weka.core.Instance instance)
 {
     return(m_volume);
 }
Esempio n. 35
0
        public void classifyTest()
        {
            try
            {
                CSV2Arff();
                java.io.FileReader  arrfFile = new java.io.FileReader("D:/final_version/Gesture-Gis-master/GestureGis2/ComparisonFeaturefile.arff");
                weka.core.Instances insts    = new weka.core.Instances(arrfFile);
                //weka.core.Instances insts2 = new weka.core.Instances(new java.io.FileReader("D:/Gesture-Gis-master/GestureGis2/ComparisonFeaturefile.arff"));
                insts.setClassIndex(insts.numAttributes() - 1);

                //int percentSplit = 66;

                weka.classifiers.Classifier cl = new weka.classifiers.trees.J48();
                //Console.WriteLine("Performing " + percentSplit + "% split evaluation.");

                //randomize the order of the instances in the dataset.
                //weka.filters.Filter myRandom = new weka.filters.unsupervised.instance.Randomize();
                //myRandom.setInputFormat(insts);
                //insts = weka.filters.Filter.useFilter(insts, myRandom);
                int count                 = insts.numInstances();
                int trainSize             = count - 1;
                int testSize              = count - trainSize;
                weka.core.Instances train = new weka.core.Instances(insts, 0, trainSize);

                cl.buildClassifier(train);
                //weka.core.Instance current = insts2.instance(0);
                int numCorrect = 0;

                /*for (int i = trainSize; i < insts.numInstances(); i++)
                 * {
                 *  weka.core.Instance currentInst = insts.instance(i);
                 *  double predictedClass = cl.classifyInstance(currentInst);
                 *  if (predictedClass == insts.instance(i).classValue())
                 *      numCorrect++;
                 * }*/
                int index = count - 1;
                weka.core.Instance currentInst    = insts.instance(index);
                double             predictedClass = cl.classifyInstance(currentInst);
                int pre = (int)predictedClass;
                if (predictedClass == insts.instance(index).classValue())
                {
                    numCorrect++;
                }
                //insts.instance(index).classAttribute();
                //insts.attribute(11);
                string s = insts.toString();
                //pull the class labels out of the "{...}" list in the ARFF header text
                s = s.Substring(s.IndexOf("{") + 1);
                s = s.Substring(0, s.IndexOf("}"));
                string[] ae = s.Split(',');

                /*ArrayList arr = new ArrayList();
                 * string path_class = @"D:\final_version\Gesture-Gis-master\GestureGis2\Classfile.txt";
                 * using (StreamReader reader = new StreamReader(path_class))
                 * {
                 *  while (!reader.EndOfStream)
                 *  {
                 *      arr.Add(reader.ReadLine());
                 *  }
                 *  reader.Close();
                 * }*/
                PredictedClassbyWeka = (string)(ae[pre]);
                arrfFile.close();

                //insts.instance(index).attribute(3);

                /*System.Diagnostics.Debug.WriteLine(numCorrect + " out of " + testSize + " correct (" +
                 *         (double)((double)numCorrect / (double)testSize * 100.0) + "%)");
                 * Console.WriteLine(numCorrect + " out of " + testSize + " correct (" +
                 *         (double)((double)numCorrect / (double)testSize * 100.0) + "%)");*/
            }
            catch (java.lang.Exception ex)
            {
                ex.printStackTrace();
            }
        }
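The snippet above recovers the class labels by string-splitting insts.toString(); assuming the same translated Weka API, the predicted label can be read directly from the class attribute, which avoids the brittle parsing:

                // Equivalent, sturdier lookup (sketch): pre is the integer class index
                // produced by cl.classifyInstance(currentInst) in the method above.
                string predictedLabel = insts.classAttribute().value(pre);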
Esempio n. 36
0
 static public double do_test_single(weka.classifiers.Classifier classifier, weka.core.Instances insts_test)
 {
     weka.core.Instance currentInst = insts_test.lastInstance();
     return(classifier.classifyInstance(currentInst));
 }
        public weka.core.Instance GenerateFeatures(PokerHand hand, int rIdx, int aIdx, weka.core.Instances data, bool generateClass = true)
        {
            // Check that we are using limit betting.
            Debug.Assert(hand.Context.BettingType == BettingType.FixedLimit);

            var results = new weka.core.Instance(data.numAttributes());
            results.setDataset(data);
            int attIdx = 0;
            foreach (var method in typeof(LimitFeatureGenerator).GetMethods())
            {
                // Get all the features of this class.
                var attributes = method.GetCustomAttributes(typeof(Feature), true);
                if (attributes.Length == 0)
                    continue;

                // Get the feature attribute on this method.
                var attr = ((Feature)attributes[0]);

                // Get the name for this attribute
                string name = attr.Name;
                //Console.WriteLine("Hand: {0} Feature: {1}", hand.Context.ID, name);

                // Get the feature only if it's applicable to this situation.
                object feature = null;
                if(rIdx >= (int)attr.MinRound && rIdx <= (int)attr.MaxRound)
                    feature = method.Invoke(this, new object[] { hand, rIdx, aIdx });

                if (SkipMissingFeatures && (feature == null || feature.ToString() == "?"))
                    continue;

                switch (attr.FType) {
                case FeatureType.Continuous:
                    results.setValue(attIdx, (double)feature);
                    break;
                case FeatureType.Discrete:
                    results.setValue(attIdx, (int)feature);
                    break;
                case FeatureType.Boolean:
                case FeatureType.Nominal:
                case FeatureType.Enum:
                {
                    var attribute = data.attribute(attIdx);
                    var attVal = attribute.indexOfValue(feature.ToString());
                    if(attVal < 0 || attVal > attribute.numValues())
                        throw new Exception(string.Format("Invalid attribute value: {0} for attribute {1} (idx: {2}, total values: {3})", feature.ToString(), name, attVal, attribute.numValues()));
                    results.setValue(attribute, attVal);
                }
                    break;
                case FeatureType.String:
                {
                    var attribute = data.attribute(attIdx);
                    results.setValue(attribute, feature.ToString());
                }
                    break;
                default: throw new Exception("Unspecified feature type for feature: " + method.Name);
                }

                attIdx++;
            }

            if(generateClass)
            {
                var classAttr = data.classAttribute();
                switch (hand.Rounds[rIdx].Actions[aIdx].Type)
                {
                case ActionType.Bet:
                case ActionType.Raise: results.setClassValue(classAttr.indexOfValue("Raise"));
                    break;
                case ActionType.Call:
                case ActionType.Check: results.setClassValue(classAttr.indexOfValue("Call"));
                    break;
                case ActionType.Fold: results.setClassValue(classAttr.indexOfValue("Fold"));
                    break;
                default:
                    break;
                }
            }

            return results;
        }
 public double[] ClassifyNewData(double[] newData)
 {
     weka.core.Instance newInstance = new weka.core.Instance(1, newData);
     newInstance.setDataset(playerData);
     return(classifier.distributionForInstance(newInstance));
 }
Esempio n. 39
0
        public override double classifyInstance(weka.core.Instance instance)
        {
            if (m_instances.numInstances() == 0)
            {
                return(2);
            }

            if (m_instances.numAttributes() != instance.numAttributes())
            {
                throw new AssertException("different attribute.");
            }
            int n = instance.numAttributes();
            List <Tuple <int, int> > dist = new List <Tuple <int, int> >();

            for (int i = 0; i < m_instances.numInstances(); ++i)
            {
                int d1 = 0, d2 = 0;
                weka.core.Instance instanceI = m_instances.instance(i);
                for (int j = 0; j < n; ++j)
                {
                    //d += (int)((instanceI.value(j) - instance.value(j)) * (instanceI.value(j) - instance.value(j)));
                    if (instanceI.value(j) != instance.value(j))
                    {
                        d1++;
                    }
                    if (instance.value(j) != 0)
                    {
                        d2++;
                    }
                }
                int c = (int)instanceI.classValue();

                dist.Add(new Tuple <int, int>(d1, c));
            }
            if (dist.Count == 0)
            {
                return(2);
            }

            dist.Sort(new Comparison <Tuple <int, int> >((x, y) =>
            {
                return(x.Item1.CompareTo(y.Item1));
            }));

            int sum = 0, count = 0;

            for (int i = 0; i < dist.Count; ++i)
            {
                if (dist[i].Item1 < 4)
                {
                    sum += dist[i].Item2;
                    count++;
                }
                else
                {
                    break;
                }
            }
            if (count == 0)
            {
                return(2);
            }
            if (count < m_instances.numInstances() / 70)
            {
                return(2);
            }
            return((int)Math.Round((double)sum / count));
        }
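The method above is effectively a hand-rolled nearest-neighbour vote over a Hamming-style distance; for comparison, a minimal sketch using Weka's built-in IBk classifier (this is an assumption-laden alternative, not the author's code; m_instances is the training set held by the surrounding class and k = 3 is an arbitrary choice):

        // Sketch: delegate the nearest-neighbour logic to IBk instead of computing distances by hand.
        public double ClassifyWithIBk(weka.core.Instance instance)
        {
            weka.classifiers.Classifier knn = new weka.classifiers.lazy.IBk(3);
            knn.buildClassifier(m_instances);
            return knn.classifyInstance(instance);
        }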
Esempio n. 40
0
		/// <summary> Input an instance for filtering. Ordinarily the instance is processed
		/// and made available for output immediately. Some filters require all
		/// instances be read before producing output.
		/// 
		/// </summary>
		/// <param name="instance">the input instance
		/// </param>
		/// <returns> true if the filtered instance may now be
		/// collected with output().
		/// </returns>
		/// <exception cref="IllegalStateException">if no input structure has been defined.
		/// </exception>
		public override bool input(Instance instance)
		{
			
			if (getInputFormat() == null)
			{
				throw new System.SystemException("No input instance format defined");
			}
			if (m_NewBatch)
			{
				resetQueue();
				m_NewBatch = false;
			}
			
			if (getOutputFormat().numAttributes() == 0)
			{
				return false;
			}
			double[] vals = new double[getOutputFormat().numAttributes()];
			for (int i = 0; i < m_SelectedAttributes.Length; i++)
			{
				int current = m_SelectedAttributes[i];
				vals[i] = instance.value_Renamed(current);
			}
			Instance inst = null;
			if (instance is SparseInstance)
			{
				inst = new SparseInstance(instance.weight(), vals);
			}
			else
			{
				inst = new Instance(instance.weight(), vals);
			}
			copyStringValues(inst, false, instance.dataset(), m_InputStringIndex, getOutputFormat(), OutputStringIndex);
			inst.Dataset = getOutputFormat();
			push(inst);
			return true;
		}
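Because this filter makes each instance available immediately, it can also be driven one instance at a time instead of through useFilter. A minimal sketch, assuming the translated Weka API used throughout these examples:

        // Sketch: stream instances through a filter whose input() returns true right away.
        public static void StreamThroughFilter(weka.filters.Filter filter, weka.core.Instances data)
        {
            filter.setInputFormat(data);
            for (int i = 0; i < data.numInstances(); i++)
            {
                if (filter.input(data.instance(i)))
                {
                    weka.core.Instance filtered = filter.output();
                    System.Console.WriteLine(filtered.toString());
                }
            }
        }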
Esempio n. 41
0
        private void result_Click(object sender, EventArgs e)
        {
            ArrayList algorithms = new ArrayList();

            algorithms.Add("Naive Bayes");
            algorithms.Add("K Nearest Neighbor");
            algorithms.Add("Decision Tree");
            algorithms.Add("Neural Network");
            algorithms.Add("Support Vector Machine");
            ArrayList successPercent = new ArrayList();
            double    res_Naive, res_KNN, res_NN, res_Tree, res_SVM = 0.0;
            string    nameOfAlgo = "";

            //NAIVE BAYES ALGORITHM
            weka.core.Instances insts = new weka.core.Instances(new java.io.FileReader(fileDirectory));

            //CREATING DYNAMIC GRIDVIEW FOR ADDING A NEW INSTANCE
            dataGridView1.ColumnCount   = 2;
            dataGridView1.RowCount      = insts.numAttributes();
            String[,] matrixOfInstances = new String[insts.numInstances(), insts.numAttributes()];



            for (int y = 0; y < insts.numAttributes() - 1; y++)
            {
                dataGridView1.Rows[y].Cells[0].Value = insts.attribute(y).name();
                if (insts.attribute(y).isNominal())
                {
                    //nominalDataValues.Add(insts.attribute(y).toString());
                    string   phrase = insts.attribute(y).toString();
                    string[] first  = phrase.Split('{');

                    string[] second = first[1].Split('}');

                    string[] attributeValues = second[0].Split(',');

                    DataGridViewComboBoxCell comboColumn = new DataGridViewComboBoxCell();

                    foreach (var a in attributeValues)
                    {
                        comboColumn.Items.Add(a);
                    }
                    dataGridView1.Rows[y].Cells[1] = comboColumn;
                }
            }

            insts.setClassIndex(insts.numAttributes() - 1);
            cl_Naive = new weka.classifiers.bayes.NaiveBayes();

            weka.filters.Filter myNominalData = new weka.filters.unsupervised.attribute.Discretize();
            myNominalData.setInputFormat(insts);
            insts = weka.filters.Filter.useFilter(insts, myNominalData);


            //randomize the order of the instances in the dataset.
            weka.filters.Filter myRandom = new weka.filters.unsupervised.instance.Randomize();
            myRandom.setInputFormat(insts);
            insts = weka.filters.Filter.useFilter(insts, myRandom);

            int trainSize = insts.numInstances() * percentSplit / 100;
            int testSize  = insts.numInstances() - trainSize;

            weka.core.Instances train = new weka.core.Instances(insts, 0, trainSize);

            cl_Naive.buildClassifier(train);

            string str = cl_Naive.toString();

            int numCorrect = 0;

            for (int i = trainSize; i < insts.numInstances(); i++)
            {
                weka.core.Instance currentInst    = insts.instance(i);
                double             predictedClass = cl_Naive.classifyInstance(currentInst);
                if (predictedClass == insts.instance(i).classValue())
                {
                    numCorrect++;
                }
            }
            res_Naive = (double)((double)numCorrect / (double)testSize * 100.0);
            successPercent.Add(res_Naive);
            //kNN

            weka.core.Instances insts2 = new weka.core.Instances(new java.io.FileReader(fileDirectory));

            insts2.setClassIndex(insts2.numAttributes() - 1);

            cl_Knn = new weka.classifiers.lazy.IBk();

            //Nominal to Binary
            weka.filters.Filter myBinaryData = new weka.filters.unsupervised.attribute.NominalToBinary();
            myBinaryData.setInputFormat(insts2);
            insts2 = weka.filters.Filter.useFilter(insts2, myBinaryData);

            //Normalization
            weka.filters.Filter myNormalized = new weka.filters.unsupervised.instance.Normalize();
            myNormalized.setInputFormat(insts2);
            insts2 = weka.filters.Filter.useFilter(insts2, myNormalized);

            //randomize the order of the instances in the dataset.
            weka.filters.Filter myRandom2 = new weka.filters.unsupervised.instance.Randomize();
            myRandom2.setInputFormat(insts2);
            insts2 = weka.filters.Filter.useFilter(insts2, myRandom2);

            int trainSize2 = insts2.numInstances() * percentSplit / 100;
            int testSize2  = insts2.numInstances() - trainSize2;

            weka.core.Instances train2 = new weka.core.Instances(insts2, 0, trainSize2);

            cl_Knn.buildClassifier(train2);

            string str2 = cl_Knn.toString();

            int numCorrect2 = 0;

            for (int i = trainSize2; i < insts2.numInstances(); i++)
            {
                weka.core.Instance currentInst2   = insts2.instance(i);
                double             predictedClass = cl_Knn.classifyInstance(currentInst2);
                if (predictedClass == insts2.instance(i).classValue())
                {
                    numCorrect2++;
                }
            }
            res_KNN = (double)((double)numCorrect2 / (double)testSize2 * 100.0);
            successPercent.Add(res_KNN);

            //Decision tree
            weka.core.Instances insts3 = new weka.core.Instances(new java.io.FileReader(fileDirectory));

            insts3.setClassIndex(insts3.numAttributes() - 1);

            cl_Tree = new weka.classifiers.trees.J48();



            weka.filters.Filter myNormalized2 = new weka.filters.unsupervised.instance.Normalize();
            myNormalized2.setInputFormat(insts3);
            insts3 = weka.filters.Filter.useFilter(insts3, myNormalized2);


            //randomize the order of the instances in the dataset.
            weka.filters.Filter myRandom3 = new weka.filters.unsupervised.instance.Randomize();
            myRandom3.setInputFormat(insts3);
            insts3 = weka.filters.Filter.useFilter(insts3, myRandom3);

            int trainSize3 = insts3.numInstances() * percentSplit / 100;
            int testSize3  = insts3.numInstances() - trainSize3;

            weka.core.Instances train3 = new weka.core.Instances(insts3, 0, trainSize3);

            cl_Tree.buildClassifier(train3);

            string str3 = cl_Tree.toString();

            int numCorrect3 = 0;

            for (int i = trainSize3; i < insts3.numInstances(); i++)
            {
                weka.core.Instance currentInst3   = insts3.instance(i);
                double             predictedClass = cl_Tree.classifyInstance(currentInst3);
                if (predictedClass == insts3.instance(i).classValue())
                {
                    numCorrect3++;
                }
            }
            res_Tree = (double)((double)numCorrect3 / (double)testSize3 * 100.0);
            successPercent.Add(res_Tree);

            //Neural Network
            weka.core.Instances insts4 = new weka.core.Instances(new java.io.FileReader(fileDirectory));

            insts4.setClassIndex(insts4.numAttributes() - 1);

            cl_NN = new weka.classifiers.functions.MultilayerPerceptron();

            //Nominal to Binary
            weka.filters.Filter myBinaryData2 = new weka.filters.unsupervised.attribute.NominalToBinary();
            myBinaryData2.setInputFormat(insts4);
            insts4 = weka.filters.Filter.useFilter(insts4, myBinaryData2);

            //Normalization
            weka.filters.Filter myNormalized3 = new weka.filters.unsupervised.instance.Normalize();
            myNormalized3.setInputFormat(insts4);
            insts4 = weka.filters.Filter.useFilter(insts4, myNormalized3);

            //randomize the order of the instances in the dataset.
            weka.filters.Filter myRandom4 = new weka.filters.unsupervised.instance.Randomize();
            myRandom4.setInputFormat(insts4);
            insts4 = weka.filters.Filter.useFilter(insts4, myRandom4);

            int trainSize4 = insts4.numInstances() * percentSplit / 100;
            int testSize4  = insts4.numInstances() - trainSize4;

            weka.core.Instances train4 = new weka.core.Instances(insts4, 0, trainSize4);

            cl_NN.buildClassifier(train4);

            string str4 = cl_NN.toString();

            int numCorrect4 = 0;

            for (int i = trainSize4; i < insts4.numInstances(); i++)
            {
                weka.core.Instance currentInst4   = insts4.instance(i);
                double             predictedClass = cl_NN.classifyInstance(currentInst4);
                if (predictedClass == insts4.instance(i).classValue())
                {
                    numCorrect4++;
                }
            }

            res_NN = (double)((double)numCorrect4 / (double)testSize4 * 100.0);
            successPercent.Add(res_NN);

            //SVM
            weka.core.Instances insts5 = new weka.core.Instances(new java.io.FileReader(fileDirectory));

            insts5.setClassIndex(insts5.numAttributes() - 1);

            cl_SVM = new weka.classifiers.functions.SMO();

            //Nominal to Binary
            weka.filters.Filter myBinaryData3 = new weka.filters.unsupervised.attribute.NominalToBinary();
            myBinaryData3.setInputFormat(insts5);
            insts5 = weka.filters.Filter.useFilter(insts5, myBinaryData3);

            //Normalization
            weka.filters.Filter myNormalized4 = new weka.filters.unsupervised.instance.Normalize();
            myNormalized4.setInputFormat(insts5);
            insts5 = weka.filters.Filter.useFilter(insts5, myNormalized4);

            //randomize the order of the instances in the dataset.
            weka.filters.Filter myRandom5 = new weka.filters.unsupervised.instance.Randomize();
            myRandom5.setInputFormat(insts5);
            insts5 = weka.filters.Filter.useFilter(insts5, myRandom5);

            int trainSize5 = insts5.numInstances() * percentSplit / 100;
            int testSize5  = insts5.numInstances() - trainSize5;

            weka.core.Instances train5 = new weka.core.Instances(insts5, 0, trainSize5);

            cl_SVM.buildClassifier(train5);

            string str5 = cl_SVM.toString();

            int numCorrect5 = 0;

            for (int i = trainSize5; i < insts5.numInstances(); i++)
            {
                weka.core.Instance currentInst5   = insts5.instance(i);
                double             predictedClass = cl_SVM.classifyInstance(currentInst5);
                if (predictedClass == insts5.instance(i).classValue())
                {
                    numCorrect5++;
                }
            }
            res_SVM = (double)((double)numCorrect5 / (double)testSize5 * 100.0);
            successPercent.Add(res_SVM);


            //pick the classifier with the highest accuracy
            for (int i = 0; i < successPercent.Count; i++)
            {
                if ((double)successPercent[i] > max)
                {
                    max   = (double)successPercent[i];
                    count = i + 1;
                }
            }
            //count is 1 + the index of the best classifier, so this loop ends on its name
            for (int i = 0; i < count; i++)
            {
                nameOfAlgo = (string)algorithms[i];
            }

            textBox1.Text = nameOfAlgo + " is the most successful algorithm for this data set." + "(" + max + "%)\n";
        }
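The five nearly identical blocks above could be collapsed into one helper; a sketch (not the author's code) that evaluates any classifier with the same percent-split scheme and takes the optional preprocessing filters as parameters:

        // Sketch: returns the percentage of held-out instances the classifier predicts correctly.
        private static double EvaluateSplit(weka.classifiers.Classifier cl,
                                            string arffPath, int percentSplit,
                                            params weka.filters.Filter[] filters)
        {
            weka.core.Instances insts = new weka.core.Instances(new java.io.FileReader(arffPath));
            insts.setClassIndex(insts.numAttributes() - 1);

            // Apply any preprocessing filters (Discretize, NominalToBinary, Normalize, ...).
            foreach (weka.filters.Filter f in filters)
            {
                f.setInputFormat(insts);
                insts = weka.filters.Filter.useFilter(insts, f);
            }

            // Randomize, split, train and test exactly as in the blocks above.
            weka.filters.Filter random = new weka.filters.unsupervised.instance.Randomize();
            random.setInputFormat(insts);
            insts = weka.filters.Filter.useFilter(insts, random);

            int trainSize = insts.numInstances() * percentSplit / 100;
            int testSize = insts.numInstances() - trainSize;
            weka.core.Instances train = new weka.core.Instances(insts, 0, trainSize);
            cl.buildClassifier(train);

            int numCorrect = 0;
            for (int i = trainSize; i < insts.numInstances(); i++)
            {
                if (cl.classifyInstance(insts.instance(i)) == insts.instance(i).classValue())
                    numCorrect++;
            }
            return (double)numCorrect / testSize * 100.0;
        }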
Esempio n. 42
0
		/// <summary> Adds an output instance to the queue. The derived class should use this
		/// method for each output instance it makes available. 
		/// 
		/// </summary>
		/// <param name="instance">the instance to be added to the queue.
		/// </param>
		protected internal virtual void  push(Instance instance)
		{
			
			if (instance != null)
			{
				copyStringValues(instance, m_OutputFormat, m_OutputStringAtts);
				instance.Dataset = m_OutputFormat;
				m_OutputQueue.push(instance);
			}
		}
Esempio n. 43
0
		/// <summary> Adds the supplied input instance to the inputformat dataset for
		/// later processing.  Use this method rather than
		/// getInputFormat().add(instance); otherwise string attribute values will not be copied correctly. Note that the provided
		/// instance gets copied when buffered. 
		/// 
		/// </summary>
		/// <param name="instance">the <code>Instance</code> to buffer.  
		/// </param>
		protected internal virtual void  bufferInput(Instance instance)
		{
			
			if (instance != null)
			{
				copyStringValues(instance, m_InputFormat, m_InputStringAtts);
				m_InputFormat.add(instance);
			}
		}
Esempio n. 44
0
		/// <summary> Copies string values contained in the instance copied to a new
		/// dataset. The Instance must already be assigned to a dataset. This
		/// dataset and the destination dataset must have the same structure.
		/// 
		/// </summary>
		/// <param name="instance">the Instance containing the string values to copy.
		/// </param>
		/// <param name="destDataset">the destination set of Instances
		/// </param>
		/// <param name="strAtts">an array containing the indices of any string attributes
		/// in the dataset.  
		/// </param>
		private void  copyStringValues(Instance inst, Instances destDataset, int[] strAtts)
		{
			
			if (strAtts.Length == 0)
			{
				return ;
			}
			if (inst.dataset() == null)
			{
				throw new System.ArgumentException("Instance has no dataset assigned!!");
			}
			else if (inst.dataset().numAttributes() != destDataset.numAttributes())
			{
				throw new System.ArgumentException("Src and Dest differ in # of attributes!!");
			}
			copyStringValues(inst, true, inst.dataset(), strAtts, destDataset, strAtts);
		}
Esempio n. 45
0
        public static string classifyTest(string file, string classifier)
        {
            string data = "No data";

            try
            {
                //weka.core.Instances insts = new weka.core.Instances(new java.io.FileReader("C:\\Users\\kinli\\source\\repos\\WebApplication2\\WebApplication2\\iris.arff"));
                weka.core.Instances insts = new weka.core.Instances(new java.io.FileReader(file));
                insts.setClassIndex(insts.numAttributes() - 1);
                weka.classifiers.Classifier cl = new weka.classifiers.trees.J48();

                if (classifier == "J48")
                {
                    cl = new weka.classifiers.trees.J48();
                }
                else if (classifier == "MLP")
                {
                    cl = new weka.classifiers.functions.MultilayerPerceptron();
                }
                else if (classifier == "NaiveBayes")
                {
                    cl = new weka.classifiers.bayes.NaiveBayes();
                }

                //data = ("Performing " + percentSplit + "% split evaluation.\n");
                data = ("Performing use training set evaluation.\n");
                //randomize the order of the instances in the dataset.

                weka.filters.Filter myRandom = new weka.filters.unsupervised.instance.Randomize();
                myRandom.setInputFormat(insts);
                insts = weka.filters.Filter.useFilter(insts, myRandom);

                /*
                 * int trainSize = insts.numInstances() * percentSplit / 100;
                 * int testSize = insts.numInstances() - trainSize;
                 * weka.core.Instances train = new weka.core.Instances(insts, 0, trainSize);
                 *
                 * cl.buildClassifier(train);
                 * int numCorrect = 0;
                 * for (int i = trainSize; i < insts.numInstances(); i++)
                 * {
                 *  weka.core.Instance currentInst = insts.instance(i);
                 *  double predictedClass = cl.classifyInstance(currentInst);
                 *  if (predictedClass == insts.instance(i).classValue())
                 *      numCorrect++;
                 * }*/
                cl.buildClassifier(insts);

                int numCorrect = 0;
                for (int i = 0; i < insts.numInstances(); i++)
                {
                    weka.core.Instance currentInst    = insts.instance(i);
                    double             predictedClass = cl.classifyInstance(currentInst);
                    if (predictedClass == insts.instance(i).classValue())
                    {
                        numCorrect++;
                    }
                }
                data = data + (numCorrect + " out of " + insts.numInstances() + " correct (" +
                               (double)((double)numCorrect / (double)insts.numInstances() * 100.0) + "%)");
            }
            catch (java.lang.Exception ex)
            {
                data = "Error";
                ex.printStackTrace();
            }
            return(data);
        }
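Measuring accuracy on the training data, as above, is optimistic. Assuming the translated Weka library on this page also exposes weka.classifiers.Evaluation (it does in standard Weka), a 10-fold cross-validation sketch would look like this; the file path and J48 choice are placeholders:

        // Sketch: 10-fold cross-validation instead of training-set accuracy.
        public static string CrossValidate(string file)
        {
            weka.core.Instances insts = new weka.core.Instances(new java.io.FileReader(file));
            insts.setClassIndex(insts.numAttributes() - 1);

            weka.classifiers.Classifier cl = new weka.classifiers.trees.J48();
            weka.classifiers.Evaluation eval = new weka.classifiers.Evaluation(insts);
            eval.crossValidateModel(cl, insts, 10, new java.util.Random(1));
            return eval.toSummaryString();
        }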
Esempio n. 46
0
		/// <summary> Takes string values referenced by an Instance and copies them from a
		/// source dataset to a destination dataset. The instance references are
		/// updated to be valid for the destination dataset. The instance may have the 
		/// structure (i.e. number and attribute position) of either dataset (this
		/// affects where references are obtained from). The source dataset must
		/// have the same structure as the filter input format and the destination
		/// must have the same structure as the filter output format.
		/// 
		/// </summary>
		/// <param name="instance">the instance containing references to strings in the source
		/// dataset that will have references updated to be valid for the destination
		/// dataset.
		/// </param>
		/// <param name="instSrcCompat">true if the instance structure is the same as the
		/// source, or false if it is the same as the destination
		/// </param>
		/// <param name="srcDataset">the dataset for which the current instance string
		/// references are valid (after any position mapping if needed)
		/// </param>
		/// <param name="destDataset">the dataset for which the current instance string
		/// references need to be inserted (after any position mapping if needed)
		/// </param>
		protected internal virtual void  copyStringValues(Instance instance, bool instSrcCompat, Instances srcDataset, Instances destDataset)
		{
			
			copyStringValues(instance, instSrcCompat, srcDataset, m_InputStringAtts, destDataset, m_OutputStringAtts);
		}
Esempio n. 47
0
        private void button1_Click(object sender, EventArgs e)
        {
            string         fname  = "";
            OpenFileDialog dialog = new OpenFileDialog();

            dialog.Filter =
                "Weka Files (*.arff)|*.arff|All files (*.*)|*.*";
            dialog.InitialDirectory = Application.StartupPath;
            dialog.Title            = "Select a .arff file";
            if (dialog.ShowDialog() == DialogResult.OK)
            {
                fname = dialog.FileName;
                //label5.Text = System.IO.Directory.;
            }
            if (fname == "")
            {
                return;
            }
            try
            {
                weka.core.Instances insts = new weka.core.Instances(new java.io.FileReader(fname.ToString()));
                insts.setClassIndex(insts.numAttributes() - 1);


                Classifier cl = new weka.classifiers.functions.SMO();
                //label1.Text = "Performing " + percentSplit + "% split evaluation.";


                //randomize the order of the instances in the dataset.
                weka.filters.Filter myRandom = new weka.filters.unsupervised.instance.Randomize();
                myRandom.setInputFormat(insts);
                insts = weka.filters.Filter.useFilter(insts, myRandom);

                int trainSize             = insts.numInstances() * percentSplit / 100;
                int testSize              = insts.numInstances() - trainSize;
                weka.core.Instances train = new weka.core.Instances(insts, 0, trainSize);

                cl.buildClassifier(train);
                int numCorrect = 0;
                for (int i = trainSize; i < insts.numInstances(); i++)
                {
                    weka.core.Instance currentInst    = insts.instance(i);
                    double             predictedClass = cl.classifyInstance(currentInst);
                    if (predictedClass == insts.instance(i).classValue())
                    {
                        numCorrect++;
                    }
                }
                //label1.Text = numCorrect + " out of " + testSize + " correct (" +
                //(double)((double)numCorrect / (double)testSize * 100.0) + "%)";

                label6.Text = testSize.ToString();
                label7.Text = numCorrect.ToString();
                label8.Text = (double)((double)numCorrect / (double)testSize * 100.0) + "%";
                double result_perc = (double)((double)numCorrect / (double)testSize * 100.0);

                result_perc = Math.Truncate(result_perc);

                try
                {
                    // Send Data On Serial port
                    SerialPort serialPort = new SerialPort("COM" + textBox1.Text + "", Int32.Parse(textBox2.Text), Parity.None, 8);
                    serialPort.Open();

                    if (result_perc <= 75)
                    {
                        serialPort.WriteLine("1");
                    }


                    serialPort.WriteLine("a");


                    serialPort.Close();
                }
                catch (Exception ex)
                {
                    MessageBox.Show(ex.Message);
                }
            }
            catch (java.lang.Exception ex)
            {
                MessageBox.Show(ex.getMessage().ToString(), "");
            }
        }
    public void UpdateClassifierFromInstanceString(String line)
    {
        try {
            String[] lineSplit = line.Split(',');
            double[] fullData = new double[lineSplit.Length-1];
            for (int j = 0; j < lineSplit.Length-1; j++)
                fullData[j] = double.Parse(lineSplit[j]);

            String weightS = lineSplit[lineSplit.Length-1];
            //Debug.Log(weightS);
            weightS = weightS.Remove(weightS.Length-1,1);
            if(weightS[weightS.Length-1] == '}')
                weightS = weightS.Remove(weightS.Length-1,1);
            weightS = weightS.Remove(0,1);
            //Debug.Log(weightS);
            double weight = double.Parse(weightS);

            weka.core.Instance newInstance = new weka.core.Instance(weight,fullData);
            playerData.add(newInstance);
            classifier.buildClassifier(playerData);
        }
        catch (java.lang.Exception ex)
        {
            Debug.LogError(ex.getMessage());
        }
    }
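For reference, the same update without the string parsing, as a hedged sketch: it assumes the playerData and classifier fields used by the class above, and the feature values and weight are supplied by the caller.

    // Sketch: build a weighted instance directly and retrain.
    public void UpdateClassifierFromValues(double[] featureValues, double weight)
    {
        weka.core.Instance newInstance = new weka.core.Instance(weight, featureValues);
        playerData.add(newInstance);
        classifier.buildClassifier(playerData);
    }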
Esempio n. 49
0
        /// <summary> Classifies a given instance.
        /// </summary>
        /// <param name="instance">the instance to be classified</param>
        /// <returns>index of the predicted class</returns>
        public override double classifyInstance(Instance instance)
        {
            //if (m_counts[1] == 0 && m_counts[2] == 0)
            //    return 0;
            //else if (m_counts[0] == 0 && m_counts[2] == 0)
            //    return 1;
            //else
            //    return 2;

            double a = m_counts[0] / sumOfWeights;
            double b = m_counts[1] / sumOfWeights;

            if (a >= b && a >= m_delta)
                return 0;
            else if (b >= a && b >= m_delta)
                return 1;
            else
                return 2;
        }
Esempio n. 50
0
        public override double classifyInstance(Instance instance)
        {
            if (m_mustValue != -1)
                return m_mustValue;

            double delta = m_delta;
            if (delta == -1)
            {
                delta = 0.5;
            }
            return m_Classifier.distributionForInstance(instance)[1] < delta ? 0 : 2;
        }