Example #1
		/// <summary> Sets the format of the input instances.
		/// 
		/// </summary>
		/// <param name="instanceInfo">an Instances object containing the input instance
		/// structure (any instances contained in the object are ignored - only the
		/// structure is required).
		/// </param>
		/// <returns> true if the outputFormat may be collected immediately
		/// </returns>
		/// <exception cref="Exception">if the format couldn't be set successfully
		/// </exception>
		public override bool setInputFormat(Instances instanceInfo)
		{
			
			base.setInputFormat(instanceInfo);
			
			m_SelectCols.Upper = instanceInfo.numAttributes() - 1;
			
			// Create the output buffer
			FastVector attributes = new FastVector();
			int outputClass = -1;
			m_SelectedAttributes = m_SelectCols.Selection;
			int inStrKeepLen = 0;
			int[] inStrKeep = new int[m_SelectedAttributes.Length];
			for (int i = 0; i < m_SelectedAttributes.Length; i++)
			{
				int current = m_SelectedAttributes[i];
				if (instanceInfo.classIndex() == current)
				{
					outputClass = attributes.size();
				}
				Attribute keep = (Attribute) instanceInfo.attribute(current).copy();
				if (keep.type() == Attribute.STRING)
				{
					inStrKeep[inStrKeepLen++] = current;
				}
				attributes.addElement(keep);
			}
			m_InputStringIndex = new int[inStrKeepLen];
			Array.Copy(inStrKeep, 0, m_InputStringIndex, 0, inStrKeepLen);
			Instances outputFormat = new Instances(instanceInfo.relationName(), attributes, 0);
			outputFormat.ClassIndex = outputClass;
			setOutputFormat(outputFormat);
			return true;
		}
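A minimal usage sketch for this override (assumptions: `filter` is an instance of the attribute-selection filter this method belongs to, `data` is an already-loaded Instances object, and this converted C# port keeps Weka's static Filter.useFilter helper):
			// Sketch only: when setInputFormat returns true, the output format is known
			// immediately and the filter can be applied to the whole dataset in one call.
			if (filter.setInputFormat(data))
			{
				Instances filtered = Filter.useFilter(data, filter);
				System.Console.WriteLine(filtered.numAttributes());
			}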
        /// <summary>
        /// Loads a classifier from a model file.
        /// </summary>
        /// <param name="filename">The filename (full path) that you want to load. Should be an .arff file and a .model file in your working directory.</param>
        public void loadModel(string filename)
        {
            if (debug)
            {
                Console.WriteLine("Model loading...");
            }
            _classifier = (weka.classifiers.Classifier)weka.core.SerializationHelper.read(filename + MODEL);
            _dataSet    = new weka.core.Instances(new java.io.FileReader(filename + ARFF));
            _dataSet.setClassIndex(_dataSet.numAttributes() - 1);

            if (debug)
            {
                Console.WriteLine("Model locked and loaded!");
            }
        }
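A brief usage sketch (assumptions: the class declaring loadModel is instantiated as `recognizer` here under the hypothetical name `ModelWrapper`, and the MODEL and ARFF constants expand to ".model" and ".arff", so both files must exist next to the given base path). The classify method further down this page then uses the loaded _classifier and _dataSet fields.
            var recognizer = new ModelWrapper();            // hypothetical name for the class declaring loadModel
            recognizer.loadModel(@"C:\models\gestures");    // reads gestures.model and gestures.arff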
Example #3
 private weka.core.Instances ReadFile(string path)
 {
     try
     {
         weka.core.Instances insts = new weka.core.Instances(new java.io.FileReader(path));
         insts.setClassIndex(insts.numAttributes() - 1);
         return(insts);
     }
     catch (java.lang.Exception ex)
     {
         ex.printStackTrace();
         MessageBox.Show(ex.ToString(), "Error reading file", MessageBoxButtons.OK, MessageBoxIcon.Error);
         return(null);
     }
 }
    protected void Button2_Click(object sender, EventArgs e)
    {
        weka.core.Instances data = new weka.core.Instances(new java.io.FileReader("d:\\train.arff"));
        data.setClassIndex(data.numAttributes() - 1);
        weka.classifiers.Classifier cls = new weka.classifiers.bayes.NaiveBayes();
        // weka.classifiers.functions.supportVector.SMOset();
        int runs  = 1;
        int folds = 10;

        //string sq = "delete from nbresults";
        //dbc.execfn(sq);
        // perform cross-validation
        for (int i = 0; i < runs; i++)
        {
            // randomize data
            int seed = i + 1;
            java.util.Random    rand     = new java.util.Random(seed);
            weka.core.Instances randData = new weka.core.Instances(data);
            randData.randomize(rand);
            if (randData.classAttribute().isNominal())
            {
                randData.stratify(folds);
            }
            // weka.classifiers.trees.j48 jj;
            weka.classifiers.Evaluation eval = new weka.classifiers.Evaluation(randData);
            for (int n = 0; n < folds; n++)
            {
                weka.core.Instances train = randData.trainCV(folds, n);
                weka.core.Instances test  = randData.testCV(folds, n);
                // build and evaluate classifier
                weka.classifiers.Classifier clsCopy = weka.classifiers.Classifier.makeCopy(cls);
                clsCopy.buildClassifier(train);

                eval.evaluateModel(clsCopy, test);
            }

            preci_value.Text  = eval.precision(0).ToString();
            recall_value.Text = eval.recall(0).ToString();
            acc_value.Text    = eval.fMeasure(0).ToString();   // note: F-measure of class 0, not overall accuracy

            string s = "NB";
            //    string str = "insert into evaluation values('" + instid.Text + "','" + courid.Text.ToString() + "','" + preci_value.Text.ToString() + "','" + recall_value.Text.ToString() + "','" + acc_value.Text.ToString() + "','" + s + "' )";
            //  db.execfn(str);
            //  MessageBox.Show("saved");
        }
    }
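For reference, the bundled weka.dll's Evaluation class can run the same stratified cross-validation in one call; a rough, self-contained equivalent of the manual loop above (a sketch, assuming crossValidateModel is exposed as in stock Weka):
        weka.core.Instances data = new weka.core.Instances(new java.io.FileReader("d:\\train.arff"));
        data.setClassIndex(data.numAttributes() - 1);
        weka.classifiers.Classifier cls = new weka.classifiers.bayes.NaiveBayes();
        weka.classifiers.Evaluation eval = new weka.classifiers.Evaluation(data);
        eval.crossValidateModel(cls, data, 10, new java.util.Random(1));   // randomizes and stratifies internally
        Console.WriteLine(eval.toSummaryString());
        Console.WriteLine("Precision(0)=" + eval.precision(0) + " Recall(0)=" + eval.recall(0) + " F1(0)=" + eval.fMeasure(0));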
Example #5
        private void btnLoadStep_Click(object sender, EventArgs e)
        {
            if (m_loadStepInstances == null)
            {
                using (OpenFileDialog d = new OpenFileDialog())
                {
                    d.Filter = "Arff File|*.arff";
                    if (d.ShowDialog() == System.Windows.Forms.DialogResult.OK)
                    {
                        m_loadStepInstances = new weka.core.Instances(new java.io.BufferedReader(new java.io.FileReader(d.FileName)));
                        m_loadStepInstances.setClassIndex(m_loadStepInstances.numAttributes() - 1);

                        clear_all();
                    }
                }
            }
            else
            {
                for (int i = m_loadStepIdx; i < m_loadStepInstances.numInstances(); ++i)
                {
                    var ins = m_loadStepInstances.instance(i);
                    var p   = new valuePoint(ins.value(0), ins.value(1), (int)ins.classValue());
                    if (p.x < 0 || p.x >= 1 || p.y < 0 || p.y >= 1)
                    {
                        continue;
                    }

                    point_list.Add(p);

                    draw_point(p);

                    m_loadStepIdx = i + 1;
                    if (i % 1000 == 0)
                    {
                        break;
                    }
                }

                pictureBox1.Invalidate();
                if (m_loadStepIdx == m_loadStepInstances.numInstances())
                {
                    m_loadStepIdx       = 0;
                    m_loadStepInstances = null;
                }
            }
        }
Example #6
        //Knn
        public static double Knn(weka.core.Instances insts)
        {
            try
            {
                insts.setClassIndex(insts.numAttributes() - 1);

                Knncl = new weka.classifiers.lazy.IBk();

                weka.filters.Filter myDummy = new weka.filters.unsupervised.attribute.NominalToBinary();
                myDummy.setInputFormat(insts);
                insts = weka.filters.Filter.useFilter(insts, myDummy);

                weka.filters.Filter myNormalize = new weka.filters.unsupervised.instance.Normalize();
                myNormalize.setInputFormat(insts);
                insts = weka.filters.Filter.useFilter(insts, myNormalize);

                weka.filters.Filter myRandom = new weka.filters.unsupervised.instance.Randomize();
                myRandom.setInputFormat(insts);
                insts = weka.filters.Filter.useFilter(insts, myRandom);

                int trainSize             = insts.numInstances() * percentSplit / 100;
                int testSize              = insts.numInstances() - trainSize;
                weka.core.Instances train = new weka.core.Instances(insts, 0, trainSize);

                Knncl.buildClassifier(train);


                int numCorrect = 0;
                for (int i = trainSize; i < insts.numInstances(); i++)
                {
                    weka.core.Instance currentInst    = insts.instance(i);
                    double             predictedClass = Knncl.classifyInstance(currentInst);
                    if (predictedClass == insts.instance(i).classValue())
                    {
                        numCorrect++;
                    }
                }
                return((double)numCorrect / (double)testSize * 100.0);
            }
            catch (java.lang.Exception ex)
            {
                ex.printStackTrace();
                return(0);
            }
        }
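A usage sketch for the method above (assumptions: it lives in a static helper class called `Classifiers` here, a hypothetical name, and `Knncl` and `percentSplit` are static members of that class defined elsewhere):
            weka.core.Instances data = new weka.core.Instances(new java.io.FileReader("iris.arff"));
            double knnAccuracy = Classifiers.Knn(data);   // percentage of the held-out split classified correctly
            Console.WriteLine("kNN accuracy: " + knnAccuracy + "%");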
Example #7
        public void Test2()
        {
            java.io.ObjectInputStream   ois = new java.io.ObjectInputStream(new java.io.FileInputStream("D:\\android_analysis\\som_model.model"));
            weka.classifiers.Classifier cl  = (weka.classifiers.Classifier)ois.readObject();
            ois.close();

            weka.core.Instances insts = new weka.core.Instances(new java.io.FileReader("D:\\android_analysis\\test1.arff"));
            insts.setClassIndex(insts.numAttributes() - 1);
            for (int i = 0; i < insts.numInstances(); i++)
            {
                weka.core.Instance currentInst    = insts.instance(i);
                double             predictedClass = cl.classifyInstance(currentInst);
                double[]           distrs         = cl.distributionForInstance(currentInst);
                //string actual = insts.classAttribute().value((int)currentInst.classValue());
                //string predicted = insts.classAttribute().value((int)predictedClass);
                // System.Console.WriteLine("ID: " + (i + 1) + ", " + predicted);
            }
        }
Example #8
        public static string classifyTest()
        {
            try
            {
                String result = "";

                weka.core.Instances insts = new weka.core.Instances(new java.io.FileReader("C:\\Program Files\\Weka-3-7\\data\\iris.arff"));
                insts.setClassIndex(insts.numAttributes() - 1);

                weka.classifiers.Classifier cl = new weka.classifiers.trees.J48();
                //  Console.WriteLine("Performing " + percentSplit + "% split evaluation.");
                result += "Performing " + percentSplit + "% split evaluation.\n";
                //randomize the order of the instances in the dataset.
                weka.filters.Filter myRandom = new weka.filters.unsupervised.instance.Randomize();
                myRandom.setInputFormat(insts);
                insts = weka.filters.Filter.useFilter(insts, myRandom);

                int trainSize             = insts.numInstances() * percentSplit / 100;
                int testSize              = insts.numInstances() - trainSize;
                weka.core.Instances train = new weka.core.Instances(insts, 0, trainSize);

                cl.buildClassifier(train);
                int numCorrect = 0;
                for (int i = trainSize; i < insts.numInstances(); i++)
                {
                    weka.core.Instance currentInst    = insts.instance(i);
                    double             predictedClass = cl.classifyInstance(currentInst);
                    if (predictedClass == insts.instance(i).classValue())
                    {
                        numCorrect++;
                    }
                }
                //Console.WriteLine(numCorrect + " out of " + testSize + " correct (" + (double)((double)numCorrect / (double)testSize * 100.0) + "%)");
                result += (numCorrect + " out of " + testSize + " correct (" + (double)((double)numCorrect / (double)testSize * 100.0) + "%)");

                return(result);
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.Message);
                return(ex.Message);
            }
        }
Example #9
        public void flatten_attribute()
        {
            FlattenClass[] rows = new[] {
                new FlattenClass {
                    CLASS = 0, Att1 = new [] { 1, 2, 3, 4, 5 }
                },
                new FlattenClass {
                    CLASS = 1, Att1 = new [] { 6, 7, 8, 9, 10 }
                }
            };
            InstancesBuilder <FlattenClass> builder = new InstancesBuilder <FlattenClass>(rows, 0);

            weka.core.Instances instances = builder.Build();

            Assert.AreEqual(2, instances.numInstances());
            Assert.AreEqual(6, instances.numAttributes());

            CollectionAssert.AreEqual(new[] { "1", "2" }, instances.GetAttrStrings(1));
            CollectionAssert.AreEqual(new[] { "1.2", "2.2" }, instances.GetAttrStrings(2));
        }
Example #10
        public void test_ignore_attributes()
        {
            TestingRow6[] rows = new[] {
                new TestingRow6 {
                    CLASS = 1.0, ATT_1 = "1", ATT_2 = "1.1", ATT_3 = "1.2"
                },
                new TestingRow6 {
                    CLASS = 2.0, ATT_1 = "2", ATT_2 = "2.1", ATT_3 = "2.2"
                }
            };
            InstancesBuilder <TestingRow6> builder = new InstancesBuilder <TestingRow6>(rows, 0);

            weka.core.Instances instances = builder.Build();

            Assert.AreEqual(3, instances.numAttributes());             // 1 is ignored
            Assert.AreEqual(2, instances.numInstances());

            CollectionAssert.AreEqual(new[] { "1", "2" }, instances.GetAttrStrings(1));
            CollectionAssert.AreEqual(new[] { "1.2", "2.2" }, instances.GetAttrStrings(2));
        }
Example #11
        /// <summary>
        /// Uses the classifier to classify an instance (from its featureValues).
        /// </summary>
        /// <param name="featureValues">An array of doubles that describe the instance.</param>
        /// <returns>The string name of the classification of the instance.</returns>
        public string classify(double[] featureValues)
        {
            //if (!classifierBuilt) { _classifier.buildClassifier(_dataSet); classifierBuilt = true; }

            weka.core.Instance inst = new weka.core.Instance(1, featureValues);
            inst.setDataset(_dataSet);

            double result = _classifier.classifyInstance(inst);

            weka.core.Attribute attribute = _dataSet.attribute(_dataSet.numAttributes() - 1);
            string resultName             = attribute.value((int)result);

            // Get rid of this line once ARFF files are rewritten
            if (resultName == "Label")
            {
                resultName = "Text";
            }

            //Console.WriteLine(resultName);
            return(resultName);
        }
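An end-to-end sketch combining loadModel (earlier on this page) with classify (assumptions: both belong to the same wrapper class, instantiated here under the hypothetical name `ModelWrapper`; the feature values and base path are placeholders and must match the attribute order of the ARFF header):
            var recognizer = new ModelWrapper();               // hypothetical wrapper class name
            recognizer.loadModel(@"C:\models\strokes");        // expects strokes.model and strokes.arff
            double[] features = { 0.42, 0.11, 3.0, 7.5 };      // placeholder feature values
            Console.WriteLine(recognizer.classify(features));  // prints the predicted class name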
    /* Use when the player logs in to initially create the classifier with data from server */
    public void InitializeClassifier(String dataString)
    {
        try {
            java.io.StringReader   stringReader = new java.io.StringReader(dataString);
            java.io.BufferedReader buffReader   = new java.io.BufferedReader(stringReader);

            playerData = new weka.core.Instances(buffReader);

            /* Set which attribute in each Instance is the class, if it's not already specified by the file */
            if (playerData.classIndex() == -1)
            {
                playerData.setClassIndex(playerData.numAttributes() - 1);
            }

            /* NAIVE BAYES */
            //classifier = new weka.classifiers.bayes.NaiveBayes();

            /* NEURAL NET */
            //classifier = new weka.classifiers.functions.MultilayerPerceptron();
            //((weka.classifiers.functions.MultilayerPerceptron)classifier).setHiddenLayers("12");

            /* J48 TREE */
            //classifier = new weka.classifiers.trees.J48();

            /* IB1 NEAREST NEIGHBOUR */
            //classifier = new weka.classifiers.lazy.IB1();

            /* RANDOM FOREST */
            classifier = new weka.classifiers.trees.RandomForest();


            classifier.buildClassifier(playerData);
            Debug.Log("Initialized Classifier");
        }
        catch (java.lang.Exception ex)
        {
            Debug.LogError(ex.getMessage());
        }
    }
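Once InitializeClassifier has built the RandomForest, a new observation can be scored like this (a sketch; it assumes `playerData` and `classifier` are the fields used above, and that the 3.6-style weka.core.Instance constructor used elsewhere on this page is available):
    public double ClassifyPlayerState(double[] featureValues)
    {
        weka.core.Instance inst = new weka.core.Instance(1.0, featureValues);   // weight 1, raw attribute values
        inst.setDataset(playerData);                                            // gives the instance its header info
        return classifier.classifyInstance(inst);                               // index of the predicted class value
    }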
Example #13
        public static void classifyTest()
        {
            try
            {
                Console.WriteLine("Hello Java, from C#!");
                weka.core.Instances insts = new weka.core.Instances(new java.io.FileReader("D:/iris.arff"));
                insts.setClassIndex(insts.numAttributes() - 1);

                weka.classifiers.Classifier cl = new weka.classifiers.trees.J48();
                Console.WriteLine("Performing " + 33 + "% split evaluation.");

                //randomize the order of the instances in the dataset.
                weka.filters.Filter myRandom = new weka.filters.unsupervised.instance.Randomize();
                myRandom.setInputFormat(insts);
                insts = weka.filters.Filter.useFilter(insts, myRandom);

                int trainSize = insts.numInstances() * 33 / 100;
                int testSize = insts.numInstances() - trainSize;
                weka.core.Instances train = new weka.core.Instances(insts, 0, trainSize);

                cl.buildClassifier(train);
                int numCorrect = 0;
                for (int i = trainSize; i < insts.numInstances(); i++)
                {
                    weka.core.Instance currentInst = insts.instance(i);
                    double predictedClass = cl.classifyInstance(currentInst);
                    if (predictedClass == insts.instance(i).classValue())
                        numCorrect++;
                }
                Console.WriteLine(numCorrect + " out of " + testSize + " correct (" +
                           (double)((double)numCorrect / (double)testSize * 100.0) + "%)");
            }
            catch (Exception ex)
            {
                // Report read/evaluation errors instead of failing silently.
                Console.WriteLine(ex.Message);
            }
        }
        public weka.core.Instances[] GenerateRegressionInstances(int rIdx)
        {
            var instances = new weka.core.Instances[3];
            string dataName = ((Rounds)rIdx).ToString().ToLower() + "_{0}_data";
            for(int i = 0; i < 3; i++)
            {
                var atts = generateStateAttributes(rIdx);
                string className = i == 0 ? "Fold" : i == 1 ? "Call" : "Raise";
                atts.addElement(new weka.core.Attribute(className));
                var data = new weka.core.Instances(string.Format(dataName, className.ToLower()), atts, 0);

                data.setClassIndex(data.numAttributes() - 1);
                instances[i] = data;
            }
            return instances;
        }
        public weka.core.Instances GenerateClassifierInstances(int rIdx)
        {
            var atts = generateStateAttributes(rIdx);

            var classVals = new weka.core.FastVector();
            classVals.addElement("Fold");
            classVals.addElement("Call");
            classVals.addElement("Raise");
            atts.addElement(new weka.core.Attribute("Action", classVals));

            var data = new weka.core.Instances(((Rounds)rIdx).ToString().ToLower() + "_data", atts, 0);

            data.setClassIndex(data.numAttributes() - 1);

            return data;
        }
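A sketch of filling the header returned above with one labelled row (assumptions: rIdx 0 is a valid Rounds value, the 3.6-style weka.core.Instance constructor used elsewhere on this page is available, and the state attribute values themselves are placeholders):
            weka.core.Instances data = GenerateClassifierInstances(0);
            weka.core.Instance row = new weka.core.Instance(data.numAttributes());
            row.setDataset(data);                       // required before setting nominal values by name
            for (int i = 0; i < data.numAttributes() - 1; i++)
            {
                row.setValue(i, 0.0);                   // placeholder state attribute values
            }
            row.setValue(data.classIndex(), "Call");    // one of the class values added to the FastVector above
            data.add(row);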
Example #16
		/// <summary> Method for testing filters ability to process multiple batches.
		/// 
		/// </summary>
		/// <param name="argv">should contain the following arguments:<br>
		/// -i (first) input file <br>
		/// -o (first) output file <br>
		/// -r (second) input file <br>
		/// -s (second) output file <br>
		/// -c class_index <br>
		/// or -h for help on options
		/// </param>
		/// <exception cref="Exception">if something goes wrong or the user requests help on
		/// command options
		/// </exception>
		public static void  batchFilterFile(Filter filter, System.String[] options)
		{
			
			Instances firstData = null;
			Instances secondData = null;
			//UPGRADE_ISSUE: Class hierarchy differences between 'java.io.Reader' and 'System.IO.StreamReader' may cause compilation errors. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1186'"
			System.IO.StreamReader firstInput = null;
			//UPGRADE_ISSUE: Class hierarchy differences between 'java.io.Reader' and 'System.IO.StreamReader' may cause compilation errors. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1186'"
			System.IO.StreamReader secondInput = null;
			System.IO.StreamWriter firstOutput = null;
			System.IO.StreamWriter secondOutput = null;
			bool helpRequest;
			try
			{
				helpRequest = Utils.getFlag('h', options);
				
				System.String fileName = Utils.getOption('i', options);
				if (fileName.Length != 0)
				{
					//UPGRADE_TODO: The differences in the expected value  of parameters for constructor 'java.io.BufferedReader.BufferedReader'  may cause compilation errors.  "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1092'"
					//UPGRADE_WARNING: At least one expression was used more than once in the target code. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1181'"
					//UPGRADE_TODO: Constructor 'java.io.FileReader.FileReader' was converted to 'System.IO.StreamReader' which has a different behavior. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1073'"
					firstInput = new System.IO.StreamReader(new System.IO.StreamReader(fileName, System.Text.Encoding.Default).BaseStream, new System.IO.StreamReader(fileName, System.Text.Encoding.Default).CurrentEncoding);
				}
				else
				{
					throw new System.Exception("No first input file given.\n");
				}
				
				fileName = Utils.getOption('r', options);
				if (fileName.Length != 0)
				{
					//UPGRADE_TODO: The differences in the expected value  of parameters for constructor 'java.io.BufferedReader.BufferedReader'  may cause compilation errors.  "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1092'"
					//UPGRADE_WARNING: At least one expression was used more than once in the target code. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1181'"
					//UPGRADE_TODO: Constructor 'java.io.FileReader.FileReader' was converted to 'System.IO.StreamReader' which has a different behavior. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1073'"
					secondInput = new System.IO.StreamReader(new System.IO.StreamReader(fileName, System.Text.Encoding.Default).BaseStream, new System.IO.StreamReader(fileName, System.Text.Encoding.Default).CurrentEncoding);
				}
				else
				{
					throw new System.Exception("No second input file given.\n");
				}
				
				fileName = Utils.getOption('o', options);
				if (fileName.Length != 0)
				{
					//UPGRADE_TODO: Constructor 'java.io.FileOutputStream.FileOutputStream' was converted to 'System.IO.FileStream.FileStream' which has a different behavior. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1073_javaioFileOutputStreamFileOutputStream_javalangString'"
					firstOutput = new System.IO.StreamWriter(new System.IO.FileStream(fileName, System.IO.FileMode.Create), System.Text.Encoding.Default);
				}
				else
				{
					firstOutput = new System.IO.StreamWriter(System.Console.OpenStandardOutput(), System.Text.Encoding.Default);
				}
				
				fileName = Utils.getOption('s', options);
				if (fileName.Length != 0)
				{
					//UPGRADE_TODO: Constructor 'java.io.FileOutputStream.FileOutputStream' was converted to 'System.IO.FileStream.FileStream' which has a different behavior. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1073_javaioFileOutputStreamFileOutputStream_javalangString'"
					secondOutput = new System.IO.StreamWriter(new System.IO.FileStream(fileName, System.IO.FileMode.Create), System.Text.Encoding.Default);
				}
				else
				{
					secondOutput = new System.IO.StreamWriter(System.Console.OpenStandardOutput(), System.Text.Encoding.Default);
				}
				System.String classIndex = Utils.getOption('c', options);
				
				//			if (filter instanceof OptionHandler) 
				//			{
				//				((OptionHandler)filter).setOptions(options);
				//			}
				Utils.checkForRemainingOptions(options);
				
				if (helpRequest)
				{
					throw new System.Exception("Help requested.\n");
				}
				firstData = new Instances(firstInput, 1);
				secondData = new Instances(secondInput, 1);
				if (!secondData.equalHeaders(firstData))
				{
					throw new System.Exception("Input file formats differ.\n");
				}
				if (classIndex.Length != 0)
				{
					if (classIndex.Equals("first"))
					{
						firstData.ClassIndex = 0;
						secondData.ClassIndex = 0;
					}
					else if (classIndex.Equals("last"))
					{
						firstData.ClassIndex = firstData.numAttributes() - 1;
						secondData.ClassIndex = secondData.numAttributes() - 1;
					}
					else
					{
						firstData.ClassIndex = System.Int32.Parse(classIndex) - 1;
						secondData.ClassIndex = System.Int32.Parse(classIndex) - 1;
					}
				}
			}
			catch (System.Exception ex)
			{
				System.String filterOptions = "";
				// Output the error and also the valid options
				//			if (filter instanceof OptionHandler) 
				//			{
				//				filterOptions += "\nFilter options:\n\n";
				//				Enumeration enu = ((OptionHandler)filter).listOptions();
				//				while (enu.hasMoreElements()) 
				//				{
				//					Option option = (Option) enu.nextElement();
				//					filterOptions += option.synopsis() + '\n'
				//						+ option.description() + "\n";
				//				}
				//			}
				
				System.String genericOptions = "\nGeneral options:\n\n"
					+ "-h\n" + "\tGet help on available options.\n"
					+ "-i <filename>\n" + "\tThe file containing first input instances.\n"
					+ "-o <filename>\n" + "\tThe file first output instances will be written to.\n"
					+ "-r <filename>\n" + "\tThe file containing second input instances.\n"
					+ "-s <filename>\n" + "\tThe file second output instances will be written to.\n"
					+ "-c <class index>\n" + "\tThe number of the attribute to use as the class.\n"
					+ "\t\"first\" and \"last\" are also valid entries.\n"
					+ "\tIf not supplied then no class is assigned.\n";
				
				//UPGRADE_TODO: The equivalent in .NET for method 'java.lang.Throwable.getMessage' may return a different value. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1043'"
				throw new System.Exception('\n' + ex.Message + filterOptions + genericOptions);
			}
			bool printedHeader = false;
			if (filter.setInputFormat(firstData))
			{
				//UPGRADE_TODO: Method 'java.io.PrintWriter.println' was converted to 'System.IO.TextWriter.WriteLine' which has a different behavior. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1073_javaioPrintWriterprintln_javalangString'"
				firstOutput.WriteLine(filter.getOutputFormat().ToString());
				printedHeader = true;
			}
			
			// Pass all the instances to the filter
			while (firstData.readInstance(firstInput))
			{
				if (filter.input(firstData.instance(0)))
				{
					if (!printedHeader)
					{
						throw new System.ApplicationException("Filter didn't return true from setInputFormat() " + "earlier!");
					}
					//UPGRADE_TODO: Method 'java.io.PrintWriter.println' was converted to 'System.IO.TextWriter.WriteLine' which has a different behavior. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1073_javaioPrintWriterprintln_javalangString'"
					firstOutput.WriteLine(filter.output().ToString());
				}
				firstData.delete(0);
			}
			
			// Say that input has finished, and print any pending output instances
			if (filter.batchFinished())
			{
				if (!printedHeader)
				{
					//UPGRADE_TODO: Method 'java.io.PrintWriter.println' was converted to 'System.IO.TextWriter.WriteLine' which has a different behavior. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1073_javaioPrintWriterprintln_javalangString'"
					firstOutput.WriteLine(filter.getOutputFormat().ToString());
				}
				while (filter.numPendingOutput() > 0)
				{
					//UPGRADE_TODO: Method 'java.io.PrintWriter.println' was converted to 'System.IO.TextWriter.WriteLine' which has a different behavior. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1073_javaioPrintWriterprintln_javalangString'"
					firstOutput.WriteLine(filter.output().ToString());
				}
			}
			
			if (firstOutput != null)
			{
				//UPGRADE_NOTE: Exceptions thrown by the equivalent in .NET of method 'java.io.PrintWriter.close' may be different. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1099'"
				firstOutput.Close();
			}
			printedHeader = false;
			if (filter.OutputFormatDefined)
			{
				//UPGRADE_TODO: Method 'java.io.PrintWriter.println' was converted to 'System.IO.TextWriter.WriteLine' which has a different behavior. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1073_javaioPrintWriterprintln_javalangString'"
				secondOutput.WriteLine(filter.getOutputFormat().ToString());
				printedHeader = true;
			}
			// Pass all the second instances to the filter
			while (secondData.readInstance(secondInput))
			{
				if (filter.input(secondData.instance(0)))
				{
					if (!printedHeader)
					{
						throw new System.ApplicationException("Filter didn't return true from" + " isOutputFormatDefined() earlier!");
					}
					//UPGRADE_TODO: Method 'java.io.PrintWriter.println' was converted to 'System.IO.TextWriter.WriteLine' which has a different behavior. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1073_javaioPrintWriterprintln_javalangString'"
					secondOutput.WriteLine(filter.output().ToString());
				}
				secondData.delete(0);
			}
			
			// Say that input has finished, and print any pending output instances
			if (filter.batchFinished())
			{
				if (!printedHeader)
				{
					//UPGRADE_TODO: Method 'java.io.PrintWriter.println' was converted to 'System.IO.TextWriter.WriteLine' which has a different behavior. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1073_javaioPrintWriterprintln_javalangString'"
					secondOutput.WriteLine(filter.getOutputFormat().ToString());
				}
				while (filter.numPendingOutput() > 0)
				{
					//UPGRADE_TODO: Method 'java.io.PrintWriter.println' was converted to 'System.IO.TextWriter.WriteLine' which has a different behavior. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1073_javaioPrintWriterprintln_javalangString'"
					secondOutput.WriteLine(filter.output().ToString());
				}
			}
			if (secondOutput != null)
			{
				//UPGRADE_NOTE: Exceptions thrown by the equivalent in .NET of method 'java.io.PrintWriter.close' may be different. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1099'"
				secondOutput.Close();
			}
		}
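An example invocation of batchFilterFile (a sketch; AllFilter is Weka's pass-through filter and is assumed to be present in this converted port under the same unqualified naming as Filter and Instances):
			batchFilterFile(new AllFilter(), new System.String[] {
				"-i", "first.arff", "-o", "first-filtered.arff",
				"-r", "second.arff", "-s", "second-filtered.arff",
				"-c", "last" });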
Example #17
		/// <summary> Gets an array containing the indices of all string attributes.
		/// 
		/// </summary>
		/// <param name="insts">the Instances to scan for string attributes. 
		/// </param>
		/// <returns> an array containing the indices of string attributes in
		/// the input structure. Will be zero-length if there are no
		/// string attributes
		/// </returns>
		protected internal virtual int[] getStringIndices(Instances insts)
		{
			
			// Scan through getting the indices of String attributes
			int[] index = new int[insts.numAttributes()];
			int indexSize = 0;
			for (int i = 0; i < insts.numAttributes(); i++)
			{
				if (insts.attribute(i).type() == Attribute.STRING)
				{
					index[indexSize++] = i;
				}
			}
			int[] result = new int[indexSize];
			Array.Copy(index, 0, result, 0, indexSize);
			return result;
		}
Example #18
		/// <summary> Copies the string values contained in the given instance to a new
		/// dataset. The Instance must already be assigned to a dataset, and that
		/// dataset and the destination dataset must have the same structure.
		/// 
		/// </summary>
		/// <param name="instance">the Instance containing the string values to copy.
		/// </param>
		/// <param name="destDataset">the destination set of Instances
		/// </param>
		/// <param name="strAtts">an array containing the indices of any string attributes
		/// in the dataset.  
		/// </param>
		private void  copyStringValues(Instance inst, Instances destDataset, int[] strAtts)
		{
			
			if (strAtts.Length == 0)
			{
				return;
			}
			if (inst.dataset() == null)
			{
				throw new System.ArgumentException("Instance has no dataset assigned!!");
			}
			else if (inst.dataset().numAttributes() != destDataset.numAttributes())
			{
				throw new System.ArgumentException("Src and Dest differ in # of attributes!!");
			}
			copyStringValues(inst, true, inst.dataset(), strAtts, destDataset, strAtts);
		}
        public bool PrepareDataset()
        {
            try
            {
                weka.filters.Filter missingFilter = new weka.filters.unsupervised.attribute.ReplaceMissingValues(); // missing values handled
                missingFilter.setInputFormat(insts);
                insts = weka.filters.Filter.useFilter(insts, missingFilter);

                bool                  isTargetNumeric = insts.attribute(insts.numAttributes() - 1).isNumeric();
                List <bool>           isNumeric       = new List <bool>();
                List <bool>           is2Categorical  = new List <bool>();
                List <List <string> > numericColumns  = new List <List <string> >();
                List <string>         atrNames        = new List <string>();

                for (int i = 0; i < insts.numAttributes(); i++)
                {
                    atrNames.Add(insts.attribute(i).name());
                    bool isNum = insts.attribute(i).isNumeric();
                    isNumeric.Add(isNum);

                    if (isNum == true)
                    {
                        numericColumns.Add(new List <string>());

                        for (int j = 0; j < insts.numInstances(); j++)
                        {
                            numericColumns[numericColumns.Count - 1].Add(insts.instance(j).toString(i));
                        }
                    }
                }

                weka.filters.unsupervised.attribute.Discretize myDiscretize = new weka.filters.unsupervised.attribute.Discretize();
                myDiscretize.setInputFormat(insts);
                myDiscretize.setFindNumBins(true);
                insts = weka.filters.Filter.useFilter(insts, myDiscretize);

                List <List <string> > atrs = new List <List <string> >();

                for (int i = 0; i < insts.numAttributes(); i++)
                {
                    atrs.Add(new List <string>());
                    for (int j = 0; j < insts.attribute(i).numValues(); j++)
                    {
                        string sub_category = insts.attribute(i).value(j);
                        string temp         = sub_category.Replace("'", string.Empty);
                        atrs[atrs.Count - 1].Add(temp);
                    }

                    if (atrs[atrs.Count - 1].Count == 2)
                    {
                        is2Categorical.Add(true);
                    }
                    else
                    {
                        is2Categorical.Add(false);
                    }
                }

                List <List <string> > lst = new List <List <string> >();

                for (int i = 0; i < insts.numInstances(); i++)
                {
                    lst.Add(new List <string>());

                    for (int j = 0; j < insts.instance(i).numValues(); j++)
                    {
                        string temp = insts.instance(i).toString(j);
                        temp = temp.Replace("\\", string.Empty);
                        temp = temp.Replace("'", string.Empty);
                        lst[lst.Count - 1].Add(temp);
                    }
                }

                List <string> targetValues = atrs[insts.numAttributes() - 1];

                List <List <string> > giniDataset = ConvertToNumericWithGini(lst, atrs);
                giniDataset = Arrange2CategoricalColumns(giniDataset, lst, is2Categorical);
                giniDataset = ChangeBackNumericalColumns(giniDataset, numericColumns, isNumeric);
                WriteFile(giniDataset, filename + "-numeric-gini.arff", atrNames, targetValues, isTargetNumeric);

                List <List <string> > twoingDataset = ConvertToNumericWithTwoing(lst, atrs);
                twoingDataset = Arrange2CategoricalColumns(twoingDataset, lst, is2Categorical);
                twoingDataset = ChangeBackNumericalColumns(twoingDataset, numericColumns, isNumeric);
                WriteFile(twoingDataset, filename + "-numeric-twoing.arff", atrNames, targetValues, isTargetNumeric);

                return(true);
            }
            catch (Exception)
            {
                return(false);
            }
        }
Example #20
        private void btnLoad_Click(object sender, EventArgs e)
        {
            using (OpenFileDialog d = new OpenFileDialog())
            {
                d.Filter = "Arff File|*.arff";
                if (d.ShowDialog() == System.Windows.Forms.DialogResult.OK)
                {
                    var fileReader = new java.io.FileReader(d.FileName);
                    var instances = new weka.core.Instances(new java.io.BufferedReader(fileReader));
                    instances.setClassIndex(instances.numAttributes() - 1);
                    fileReader.close();

                    clear_all();
                    foreach (weka.core.Instance i in instances)
                    {
                        var p = new valuePoint(i.value(0), i.value(1), (int)i.classValue());
                        if (p.x < 0 || p.x >= 1 || p.y < 0 || p.y >= 1)
                            continue;
                        point_list.Add(p);
                    }
                    draw_all_points();
                    this.pictureBox1.Invalidate();
                }
            }
        }
Example #21
        public static string classifyTest(string file, string classifier)
        {
            string data = "No data";

            try
            {
                //weka.core.Instances insts = new weka.core.Instances(new java.io.FileReader("C:\\Users\\kinli\\source\\repos\\WebApplication2\\WebApplication2\\iris.arff"));
                weka.core.Instances insts = new weka.core.Instances(new java.io.FileReader(file));
                insts.setClassIndex(insts.numAttributes() - 1);
                weka.classifiers.Classifier cl = new weka.classifiers.trees.J48();

                if (classifier == "J48")
                {
                    cl = new weka.classifiers.trees.J48();
                }
                else if (classifier == "MLP")
                {
                    cl = new weka.classifiers.functions.MultilayerPerceptron();
                }
                else if (classifier == "NaiveBayes")
                {
                    cl = new weka.classifiers.bayes.NaiveBayes();
                }

                //data = ("Performing " + percentSplit + "% split evaluation.\n");
                data = ("Performing use training set evaluation.\n");
                //randomize the order of the instances in the dataset.

                weka.filters.Filter myRandom = new weka.filters.unsupervised.instance.Randomize();
                myRandom.setInputFormat(insts);
                insts = weka.filters.Filter.useFilter(insts, myRandom);

                /*
                 * int trainSize = insts.numInstances() * percentSplit / 100;
                 * int testSize = insts.numInstances() - trainSize;
                 * weka.core.Instances train = new weka.core.Instances(insts, 0, trainSize);
                 *
                 * cl.buildClassifier(train);
                 * int numCorrect = 0;
                 * for (int i = trainSize; i < insts.numInstances(); i++)
                 * {
                 *  weka.core.Instance currentInst = insts.instance(i);
                 *  double predictedClass = cl.classifyInstance(currentInst);
                 *  if (predictedClass == insts.instance(i).classValue())
                 *      numCorrect++;
                 * }*/
                cl.buildClassifier(insts);

                int numCorrect = 0;
                for (int i = 0; i < insts.numInstances(); i++)
                {
                    weka.core.Instance currentInst    = insts.instance(i);
                    double             predictedClass = cl.classifyInstance(currentInst);
                    if (predictedClass == insts.instance(i).classValue())
                    {
                        numCorrect++;
                    }
                }
                data = data + (numCorrect + " out of " + insts.numInstances() + " correct (" +
                               (double)((double)numCorrect / (double)insts.numInstances() * 100.0) + "%)");
            }
            catch (java.lang.Exception ex)
            {
                data = "Error";
                ex.printStackTrace();
            }
            return(data);
        }
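Example call for the method above (assumption: it is a static member of a helper class, referred to here by the hypothetical name `WekaHelper`):
            string report = WekaHelper.classifyTest(@"C:\Program Files\Weka-3-7\data\iris.arff", "NaiveBayes");
            Console.WriteLine(report);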
Example #22
        public override double classifyInstance(weka.core.Instance instance)
        {
            if (m_instances.numInstances() == 0)
            {
                return(2);
            }

            if (m_instances.numAttributes() != instance.numAttributes())
            {
                throw new AssertException("different attribute.");
            }
            int n = (instance.numAttributes() - 1) / 2;
            List <Tuple <int, int> > dist = new List <Tuple <int, int> >();

            for (int i = 0; i < m_instances.numInstances(); ++i)
            {
                int d1 = 0, d2 = 0;
                weka.core.Instance instanceI = m_instances.instance(i);
                for (int j = 0; j < n; ++j)
                {
                    //d += (int)((instanceI.value(j) - instance.value(j)) * (instanceI.value(j) - instance.value(j)));
                    if (instanceI.value(j) != instance.value(j))
                    {
                        if (instance.value(j) == 2 || instanceI.value(j) == 2)
                        {
                            d1++;
                        }
                        else
                        {
                            d1 += 4;
                        }
                    }
                }
                for (int j = n; j < 2 * n; ++j)
                {
                    //d += (int)((instanceI.value(j) - instance.value(j)) * (instanceI.value(j) - instance.value(j)));
                    if (instanceI.value(j) != instance.value(j))
                    {
                        if (instance.value(j) == 2 || instanceI.value(j) == 2)
                        {
                            d2++;
                        }
                        else
                        {
                            d2 += 4;
                        }
                    }
                }
                int c = (int)instanceI.classValue();
                //if (c == 0)
                //{
                //    if (d1 < n / 4 && d1 < d2)
                //    {
                //        dist.Add(new Tuple<int, int>(d1, c));
                //    }
                //}
                //else if (c == 1)
                //{
                //    if (d2 < n / 4 && d2 < d1)
                //    {
                //        dist.Add(new Tuple<int, int>(d2, c));
                //    }
                //}
                //else
                //{
                //    throw new AssertException("");
                //}
                dist.Add(new Tuple <int, int>(d1 + d2, c));
            }
            if (dist.Count == 0)
            {
                return(2);
            }

            dist.Sort(new Comparison <Tuple <int, int> >((x, y) =>
            {
                return(x.Item1.CompareTo(y.Item1));
            }));

            int sum = 0, count = 0;

            for (int i = 0; i < dist.Count; ++i)
            {
                if (dist[i].Item1 < n / 4 * 2 * 4)
                {
                    if (dist[i].Item2 != 2 && dist[i].Item2 != 3)
                    {
                        sum += dist[i].Item2;
                        count++;
                    }
                    else
                    {
                    }
                }
                else
                {
                    break;
                }
            }
            if (count == 0)
            {
                return(2);
            }
            if (count < m_instances.numInstances() / 30)
            {
                return(2);
            }
            return((int)Math.Round((double)sum / count));
        }
        private void button1_Click(object sender, EventArgs e)
        {
            string         fname  = "";
            OpenFileDialog dialog = new OpenFileDialog();

            dialog.Filter =
                "Weka Files (*.arff)|*.arff|All files (*.*)|*.*";
            dialog.InitialDirectory = Application.StartupPath;
            dialog.Title            = "Select a .arff file";
            if (dialog.ShowDialog() == DialogResult.OK)
            {
                fname = dialog.FileName;
                //label5.Text = System.IO.Directory.;
            }
            if (fname == "")
            {
                return;
            }
            try
            {
                weka.core.Instances insts = new weka.core.Instances(new java.io.FileReader(fname.ToString()));
                insts.setClassIndex(insts.numAttributes() - 1);


                Classifier cl = new weka.classifiers.functions.SMO();
                //label1.Text = "Performing " + percentSplit + "% split evaluation.";


                //randomize the order of the instances in the dataset.
                weka.filters.Filter myRandom = new weka.filters.unsupervised.instance.Randomize();
                myRandom.setInputFormat(insts);
                insts = weka.filters.Filter.useFilter(insts, myRandom);

                int trainSize             = insts.numInstances() * percentSplit / 100;
                int testSize              = insts.numInstances() - trainSize;
                weka.core.Instances train = new weka.core.Instances(insts, 0, trainSize);

                cl.buildClassifier(train);
                int numCorrect = 0;
                for (int i = trainSize; i < insts.numInstances(); i++)
                {
                    weka.core.Instance currentInst    = insts.instance(i);
                    double             predictedClass = cl.classifyInstance(currentInst);
                    if (predictedClass == insts.instance(i).classValue())
                    {
                        numCorrect++;
                    }
                }
                //label1.Text = numCorrect + " out of " + testSize + " correct (" +
                //(double)((double)numCorrect / (double)testSize * 100.0) + "%)";

                label6.Text = testSize.ToString();
                label7.Text = numCorrect.ToString();
                label8.Text = (double)((double)numCorrect / (double)testSize * 100.0) + "%";
                double result_perc = (double)((double)numCorrect / (double)testSize * 100.0);

                result_perc = Math.Truncate(result_perc);

                try
                {
                    // Send Data On Serial port
                    SerialPort serialPort = new SerialPort("COM" + textBox1.Text + "", Int32.Parse(textBox2.Text), Parity.None, 8);
                    serialPort.Open();

                    if (result_perc <= 75)
                    {
                        serialPort.WriteLine("1");
                    }


                    serialPort.WriteLine("a");


                    serialPort.Close();
                }
                catch (Exception ex)
                {
                    MessageBox.Show(ex.Message);
                }
            }
            catch (java.lang.Exception ex)
            {
                MessageBox.Show(ex.getMessage().ToString(), "");
            }
        }
Example #24
        //private void ConvertNorminalToString(string fileName)
        //{
        //    List<string> list = new List<string>();
        //    using (System.IO.StreamReader sr = new System.IO.StreamReader(fileName))
        //    {
        //        while (true)
        //        {
        //            if (sr.EndOfStream)
        //                break;
        //            string s = sr.ReadLine();
        //            if (string.IsNullOrEmpty(s))
        //                continue;
        //            int idx = s.IndexOf(' ');
        //            string c = idx == -1 ? s : s.Substring(0, idx);
        //            if (Convert.ToDouble(c) == 0)
        //            {
        //                list.Add("-1.0 " + (idx == -1 ? string.Empty : s.Substring(idx + 1)));
        //            }
        //            else if (Convert.ToDouble(c) == 1)
        //            {
        //                list.Add("0.0 " + (idx == -1 ? string.Empty : s.Substring(idx + 1)));
        //            }
        //            else if (Convert.ToDouble(c) == 2)
        //            {
        //                list.Add("+1.0 " + (idx == -1 ? string.Empty : s.Substring(idx + 1)));
        //            }
        //            else
        //            {
        //                list.Add(s);
        //            }
        //        }
        //    }
        //    using (System.IO.StreamWriter sw = new System.IO.StreamWriter(fileName))
        //    {
        //        foreach (string s in list)
        //        {
        //            sw.WriteLine(s);
        //        }
        //    }
        //}
        //private Random m_randomGenerator;
        private void AddInstancesAccordWeight(Instances instances)
        {
            // 0, 2
            double[] weights = MincostLiblinearClassifier.GetCount(instances);
            if (weights == null)
                return;

            double c = m_tp / m_sl;
            if (c == 1 && weights[0] == weights[1])
                return;

            int n = 0;
            int toCopyClass = 0;
            if (c >= 1)
            {
                int shouldWeight1 = (int)(c * weights[1]);
                n = (int)(shouldWeight1 - weights[1]);
                toCopyClass = 2;
            }
            else
            {
                int shouldWeight0 = (int)(1 / c * weights[0]);
                n = (int)(weights[1] - weights[0]);
                toCopyClass = 0;
            }
            //m_randomGenerator = new Random((int)System.DateTime.Now.Ticks);

            List<Instance> copyInstances = new List<Instance>();
            for (int i = 0; i < instances.numInstances(); ++i)
            {
                if (instances.instance(i).classValue() == toCopyClass)
                {
                    copyInstances.Add(instances.instance(i));
                }
            }

            int nAll = n / copyInstances.Count;
            for (int j = 0; j < nAll; ++j)
            {
                for (int i = 0; i < copyInstances.Count; ++i)
                {
                    Instance newInstance = new weka.core.DenseInstance(copyInstances[i]);
                    instances.add(newInstance);
                    newInstance.setDataset(instances);
                }
            }
            //for (int j = 0; j < n - nAll * copyInstances.Count; ++j)
            //{
            //    int idx = (int)(m_randomGenerator.NextDouble() * copyInstances.Count);
            //    idx = Math.Min(idx, copyInstances.Count - 1);
            //    Instance newInstance = new weka.core.DenseInstance(copyInstances[idx]);
            //    instances.add(newInstance);
            //    newInstance.setDataset(instances);
            //}

            if (n - nAll * copyInstances.Count > 0)
            {
                Instance avgInstance = new weka.core.DenseInstance(instances.numAttributes());
                for (int i = 0; i < avgInstance.numAttributes(); ++i)
                {
                    double sum = 0;
                    for (int j = 0; j < copyInstances.Count; ++j)
                    {
                        sum += copyInstances[j].value(i);
                    }
                    avgInstance.setValue(i, sum / copyInstances.Count);
                }
                for (int j = 0; j < n - nAll * copyInstances.Count; ++j)
                {
                    Instance newInstance = new weka.core.DenseInstance(avgInstance);
                    instances.add(newInstance);
                }
            }
        }
Example #25
        private void btnDiscover_Click(object sender, EventArgs e)
        {
            string type  = model.GetType().ToString();
            bool   flag  = false;
            bool   flag2 = false;

            // input validation checks
            if (nominal != null)
            {
                for (int i = 0; i < nominal.Length; i++)
                {
                    if (nominal[i].SelectedIndex == -1)
                    {
                        flag = true;
                        break;
                    }
                }
            }
            if (numeric != null)
            {
                for (int i = 0; i < numeric.Length; i++)
                {
                    if (String.IsNullOrEmpty(numeric[i].Text))
                    {
                        flag2 = true;
                        break;
                    }
                }
            }
            if (numeric != null && numAtt == numeric.Length && flag2 == true)
            {
                MessageBox.Show("Please select value!", "Error Message!");
            }
            else if (nominal != null && numAtt == nominal.Length && flag == true)
            {
                MessageBox.Show("Please select value!", "Error Message!");
            }
            else if (nominal != null && numeric != null && (nominal.Length + numeric.Length) == numAtt && (flag == true || flag2 == true))
            {
                MessageBox.Show("Please select value!", "Error Message!");
            }
            else
            {
                weka.core.Instance newIns = new weka.core.Instance(numAtt + 1);
                newIns.setDataset(insts);

                int i1 = 0, i2 = 0;
                for (int i = 0; i < numAtt; i++)
                {
                    //nominal
                    if (typeAtt[i])
                    {
                        newIns.setValue(i, nominal[i1].SelectedItem.ToString());
                        i1++;
                    }
                    //numeric
                    else
                    {
                        newIns.setValue(i, double.Parse(numeric[i2].Text));
                        i2++;
                    }
                }

                weka.core.Instances insts2 = new weka.core.Instances(insts);
                insts2.add(newIns);

                if (type == "weka.classifiers.bayes.NaiveBayes")
                {
                    weka.filters.Filter myDiscretize = new weka.filters.unsupervised.attribute.Discretize();
                    myDiscretize.setInputFormat(insts2);
                    insts2 = weka.filters.Filter.useFilter(insts2, myDiscretize);
                }

                else if (type == "weka.classifiers.functions.Logistic")
                {
                    weka.filters.Filter myDummy = new weka.filters.unsupervised.attribute.NominalToBinary();
                    myDummy.setInputFormat(insts2);
                    insts2 = weka.filters.Filter.useFilter(insts2, myDummy);

                    weka.filters.Filter myNormalize = new weka.filters.unsupervised.instance.Normalize();
                    myNormalize.setInputFormat(insts2);
                    insts2 = weka.filters.Filter.useFilter(insts2, myNormalize);
                }

                else if (type == "new weka.classifiers.lazy.IBk")
                {
                    weka.filters.Filter myDummy = new weka.filters.unsupervised.attribute.NominalToBinary();
                    myDummy.setInputFormat(insts2);
                    insts2 = weka.filters.Filter.useFilter(insts2, myDummy);

                    weka.filters.Filter myNormalize = new weka.filters.unsupervised.instance.Normalize();
                    myNormalize.setInputFormat(insts2);
                    insts2 = weka.filters.Filter.useFilter(insts2, myNormalize);
                }
                else if (type == "weka.classifiers.trees.J48")
                {
                }
                else if (type == "weka.classifiers.trees.RandomForest")
                {
                }
                else if (type == "weka.classifiers.trees.RandomTree")
                {
                }
                else if (type == "weka.classifiers.functions.MultilayerPerceptron")
                {
                    weka.filters.Filter myDummy = new weka.filters.unsupervised.attribute.NominalToBinary();
                    myDummy.setInputFormat(insts2);
                    insts2 = weka.filters.Filter.useFilter(insts2, myDummy);

                    weka.filters.Filter myNormalize = new weka.filters.unsupervised.instance.Normalize();
                    myNormalize.setInputFormat(insts2);
                    insts2 = weka.filters.Filter.useFilter(insts2, myNormalize);
                }
                else if (type == "weka.classifiers.functions.SMO")
                {
                    weka.filters.Filter myDummy = new weka.filters.unsupervised.attribute.NominalToBinary();
                    myDummy.setInputFormat(insts2);
                    insts2 = weka.filters.Filter.useFilter(insts2, myDummy);

                    weka.filters.Filter myNormalize = new weka.filters.unsupervised.instance.Normalize();
                    myNormalize.setInputFormat(insts2);
                    insts2 = weka.filters.Filter.useFilter(insts2, myNormalize);
                }

                double index = model.classifyInstance(insts2.lastInstance());
                //Model loading section: the saved model is re-read here (written by btnBrowse_Click below),
                //although the prediction above already used the in-memory model
                weka.classifiers.Classifier cls = (weka.classifiers.Classifier)weka.core.SerializationHelper.read("models/mdl.model");
                lblResult2.Text = "Result= " + insts2.attribute(insts2.numAttributes() - 1).value(Convert.ToInt16(index));
            }
        }
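        // A minimal sketch, not part of the original form, of the single-instance flow that
        // btnDiscover_Click implements above: build a weka.core.Instance, attach it to a dataset so
        // values resolve against the right attributes, then classify it. "dataset", "trainedModel"
        // and the numeric-only attributeValues parameter are illustrative assumptions; any filters
        // applied at training time would have to be re-applied here as well, exactly as the
        // branches above do.
        private string ClassifySingleInstanceSketch(weka.core.Instances dataset,
                                                    weka.classifiers.Classifier trainedModel,
                                                    double[] attributeValues)
        {
            weka.core.Instance newIns = new weka.core.Instance(dataset.numAttributes());
            newIns.setDataset(dataset);
            for (int i = 0; i < attributeValues.Length; i++)
            {
                newIns.setValue(i, attributeValues[i]); // numeric values only in this sketch
            }

            double predictedIndex = trainedModel.classifyInstance(newIns);
            // Map the predicted class index back to its nominal label
            // (assumes dataset.setClassIndex(...) has already been called)
            return dataset.classAttribute().value((int)predictedIndex);
        }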
Beispiel #26
0
        //File selection and accuracy-percentage calculation section
        private void btnBrowse_Click(object sender, EventArgs e)
        {
            clears();
            OpenFileDialog file = new OpenFileDialog();

            file.Filter      = "Files (ARFF)|*.ARFF";
            file.Multiselect = false;
            file.Title       = "Please select a dataset file!";
            if (file.ShowDialog() == DialogResult.OK)
            {
                txtPath.Text = file.FileName;
                fileName     = file.SafeFileName;

                //runs the whole comparison once a file has been selected.
                try
                {
                    if (txtPath.Text.Length < 1)
                    {
                        MessageBox.Show("Please select file!", "Error Message!");
                        txtPath.Text = "";
                    }
                    else
                    {
                        this.Text = "Processing...";
                        insts     = new weka.core.Instances(new java.io.FileReader(txtPath.Text));
                        //naive bayes
                        double max_value = NaiveBayesTest(insts);
                        model = NaiveBayescl;
                        name  = "Naïve Bayes";

                        //logistic regression
                        double LogRegressionvalue = LogRegressionTest(insts);
                        if (LogRegressionvalue > max_value)
                        {
                            max_value = LogRegressionvalue;
                            model     = LogRegressioncl;
                            name      = "Logistic Regression";
                        }
                        //knn
                        double KnnValue = Knn(insts);
                        if (KnnValue > max_value)
                        {
                            max_value = KnnValue;
                            model     = Knncl;
                            name      = "K-Nearest Neighbour";
                        }
                        //J48
                        double J48Value = J48classifyTest(insts);
                        if (J48Value > max_value)
                        {
                            max_value = J48Value;
                            model     = J48cl;
                            name      = "Decision Tree(J48)";
                        }
                        //Random forest
                        double RFvalue = RandomForestTest(insts);
                        if (RFvalue > max_value)
                        {
                            max_value = RFvalue;
                            model     = RandomForestcl;
                            name      = "Decision Tree(Random Forest)";
                        }
                        //Random Tree
                        double RTvalue = RandomTreeTest(insts);
                        if (RTvalue > max_value)
                        {
                            max_value = RTvalue;
                            model     = RandomTreecl;
                            name      = "Decision Tree(Random Tree)";
                        }
                        //Artificial nn
                        double AnnValue = ArtificialNN(insts);
                        if (AnnValue > max_value)
                        {
                            max_value = AnnValue;
                            model     = Anncl;
                            name      = "Artificial Neural Network";
                        }
                        //Svm
                        double SvmValue = SVM(insts);
                        if (SvmValue > max_value)
                        {
                            max_value = SvmValue;
                            model     = Svmcl;
                            name      = "Support Vector Machine";
                        }

                        //Model saving section
                        weka.core.SerializationHelper.write("models/mdl.model", model);

                        lblResult.Text = name + " is the most successful algorithm for this data set (" + string.Format("{0:0.00}", max_value) + "%)";
                        this.Text      = "DEUCENG - ML Classification Tool";

                        //build the input controls for selecting a new instance's values
                        numAtt = insts.numAttributes() - 1;

                        int x = 30, y = 130, t = 35, l = 110;
                        int txt = 0, cmb = 0, r1 = 0, r2 = 0;
                        labels = new Label[insts.numAttributes()];
                        for (int i = 0; i < numAtt; i++)
                        {
                            if (insts.attribute(i).isNumeric())
                            {
                                txt++;
                            }
                            else if (insts.attribute(i).isNominal())
                            {
                                cmb++;
                            }
                        }

                        nominal      = new ComboBox[cmb];
                        numeric      = new TextBox[txt];
                        typeAtt      = new bool[numAtt];
                        this.Height += (numAtt + 1) * t;

                        for (int i = 0; i < numAtt; i++)
                        {
                            if (insts.attribute(i).isNominal())
                            {
                                string[] s1 = insts.attribute(i).toString().Split('{');
                                string[] s2 = s1[1].Split('}');
                                string[] s3 = s2[0].Split(',');

                                nominal[r1] = new ComboBox();
                                labels[i]   = new Label();
                                for (int j = 0; j < s3.Length; j++)
                                {
                                    nominal[r1].Items.Add(s3[j].Replace('\'', ' ').Trim());
                                }
                                labels[i].Text = insts.attribute(i).name();
                                labels[i].Left = x;
                                labels[i].Top  = y;

                                nominal[r1].Left          = x + l;
                                nominal[r1].Top           = y;
                                nominal[r1].DropDownStyle = ComboBoxStyle.DropDownList;
                                y += t;
                                Controls.Add(nominal[r1]);
                                Controls.Add(labels[i]);
                                r1++;
                                typeAtt[i] = true;
                            }
                            else if (insts.attribute(i).isNumeric())
                            {
                                numeric[r2]      = new TextBox();
                                labels[i]        = new Label();
                                labels[i].Text   = insts.attribute(i).name();
                                labels[i].Left   = x;
                                labels[i].Top    = y;
                                numeric[r2].Left = x + l;
                                numeric[r2].Top  = y;
                                y += t;
                                Controls.Add(numeric[r2]);
                                Controls.Add(labels[i]);
                                r2++;
                                typeAtt[i] = false;
                            }

                        }

                        btnDiscover.Enabled = true;
                    }
                }
                catch (Exception e2)
                {
                    MessageBox.Show(e2.Message, "Error Message!");
                }
            }
        }
		/// <summary> Sets the format of the input instances.
		/// 
		/// </summary>
		/// <param name="instanceInfo">an Instances object containing the input instance
		/// structure (any instances contained in the object are ignored - only the
		/// structure is required).
		/// </param>
		/// <exception cref="UnsupportedAttributeTypeException">if the specified attribute
		/// is neither numeric nor nominal.
		/// </exception>
		public override bool setInputFormat(Instances instanceInfo)
		{
			
			base.setInputFormat(instanceInfo);
			
			m_AttIndex.Upper=instanceInfo.numAttributes() - 1;
			if (!Numeric && !Nominal)
			{
				throw new Exception("Can only handle numeric or nominal attributes.");
			}
			m_Values.Upper=instanceInfo.attribute(m_AttIndex.Index).numValues() - 1;
			if (Nominal && m_ModifyHeader)
			{
				instanceInfo = new Instances(instanceInfo, 0); // copy before modifying
				Attribute oldAtt = instanceInfo.attribute(m_AttIndex.Index);
				int[] selection = m_Values.Selection;
				FastVector newVals = new FastVector();
				for (int i = 0; i < selection.Length; i++)
				{
					newVals.addElement(oldAtt.value_Renamed(selection[i]));
				}
				instanceInfo.deleteAttributeAt(m_AttIndex.Index);
				instanceInfo.insertAttributeAt(new Attribute(oldAtt.name(), newVals), m_AttIndex.Index);
				m_NominalMapping = new int[oldAtt.numValues()];
				for (int i = 0; i < m_NominalMapping.Length; i++)
				{
					bool found = false;
					for (int j = 0; j < selection.Length; j++)
					{
						if (selection[j] == i)
						{
							m_NominalMapping[i] = j;
							found = true;
							break;
						}
					}
					if (!found)
					{
						m_NominalMapping[i] = - 1;
					}
				}
			}
			setOutputFormat(instanceInfo);
			return true;
		}
Beispiel #28
0
        public void classifyTest()
        {
            try
            {
                CSV2Arff();
                java.io.FileReader  arrfFile = new java.io.FileReader("D:/final_version/Gesture-Gis-master/GestureGis2/ComparisonFeaturefile.arff");
                weka.core.Instances insts    = new weka.core.Instances(arrfFile);
                //weka.core.Instances insts2 = new weka.core.Instances(new java.io.FileReader("D:/Gesture-Gis-master/GestureGis2/ComparisonFeaturefile.arff"));
                insts.setClassIndex(insts.numAttributes() - 1);

                //int percentSplit = 66;

                weka.classifiers.Classifier cl = new weka.classifiers.trees.J48();
                //Console.WriteLine("Performing " + percentSplit + "% split evaluation.");

                //randomize the order of the instances in the dataset.
                //weka.filters.Filter myRandom = new weka.filters.unsupervised.instance.Randomize();
                //myRandom.setInputFormat(insts);
                //insts = weka.filters.Filter.useFilter(insts, myRandom);
                int count                 = insts.numInstances();
                int trainSize             = count - 1;
                int testSize              = count - trainSize;
                weka.core.Instances train = new weka.core.Instances(insts, 0, trainSize);

                cl.buildClassifier(train);
                //weka.core.Instance current = insts2.instance(0);
                int numCorrect = 0;

                /*for (int i = trainSize; i < insts.numInstances(); i++)
                 * {
                 *  weka.core.Instance currentInst = insts.instance(i);
                 *  double predictedClass = cl.classifyInstance(currentInst);
                 *  if (predictedClass == insts.instance(i).classValue())
                 *      numCorrect++;
                 * }*/
                int index = count - 1;
                weka.core.Instance currentInst    = insts.instance(index);
                double             predictedClass = cl.classifyInstance(currentInst);
                int pre = (int)predictedClass;
                if (predictedClass == insts.instance(index).classValue())
                {
                    numCorrect++;
                }
                //insts.instance(index).classAttribute();
                //insts.attribute(11);
                // Extract the first nominal value list ("{...}") from the dataset's ARFF header text,
                // assumed here to be the class attribute's values
                string s = insts.toString();
                s = s.Substring(s.IndexOf("{") + 1);
                s = s.Substring(0, s.IndexOf("}"));
                string[] ae = s.Split(',');

                /*ArrayList arr = new ArrayList();
                 * string path_class = @"D:\final_version\Gesture-Gis-master\GestureGis2\Classfile.txt";
                 * using (StreamReader reader = new StreamReader(path_class))
                 * {
                 *  while (!reader.EndOfStream)
                 *  {
                 *      arr.Add(reader.ReadLine());
                 *  }
                 *  reader.Close();
                 * }*/
                PredictedClassbyWeka = (string)(ae[pre]);
                arrfFile.close();

                //insts.instance(index).attribute(3);

                /*System.Diagnostics.Debug.WriteLine(numCorrect + " out of " + testSize + " correct (" +
                 *         (double)((double)numCorrect / (double)testSize * 100.0) + "%)");
                 * Console.WriteLine(numCorrect + " out of " + testSize + " correct (" +
                 *         (double)((double)numCorrect / (double)testSize * 100.0) + "%)");*/
            }
            catch (java.lang.Exception ex)
            {
                ex.printStackTrace();
            }
        }
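        // Hedged alternative sketch, not in the original source: instead of parsing the ARFF header
        // text for the "{...}" value list as classifyTest does above, the predicted label can be read
        // directly from the class attribute. "pre" stands for the predicted class index as in the
        // method above.
        private static string PredictedLabelSketch(weka.core.Instances insts, int pre)
        {
            return insts.classAttribute().value(pre); // requires the class index to be set, as it is above
        }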
Beispiel #29
0
        //static public void classifyLastColumn(){
        //    try { colClass = insts.numAttributes() - 1; }
        //    catch { colClass = 0;  }
        //}

        static public void step_loadInstance()
        {
            insts         = new weka.core.Instances(new java.io.FileReader(TheURL.dm_path_file));
            count_feature = insts.numAttributes() - 1;
        }
Beispiel #30
0
        public override double classifyInstance(weka.core.Instance instance)
        {
            if (m_instances.numInstances() == 0)
            {
                return(2);
            }

            if (m_instances.numAttributes() != instance.numAttributes())
            {
                throw new AssertException("different attribute.");
            }
            int n = instance.numAttributes();
            List <Tuple <int, int> > dist = new List <Tuple <int, int> >();

            // Compute a Hamming-style distance (number of differing attribute values) from the
            // query instance to every stored training instance
            for (int i = 0; i < m_instances.numInstances(); ++i)
            {
                int d1 = 0, d2 = 0; // d1: count of differing values; d2 is computed below but never used
                weka.core.Instance instanceI = m_instances.instance(i);
                for (int j = 0; j < n; ++j)
                {
                    //d += (int)((instanceI.value(j) - instance.value(j)) * (instanceI.value(j) - instance.value(j)));
                    if (instanceI.value(j) != instance.value(j))
                    {
                        d1++;
                    }
                    if (instance.value(j) != 0)
                    {
                        d2++;
                    }
                }
                int c = (int)instanceI.classValue();

                dist.Add(new Tuple <int, int>(d1, c));
            }
            if (dist.Count == 0)
            {
                return(2);
            }

            dist.Sort(new Comparison <Tuple <int, int> >((x, y) =>
            {
                return(x.Item1.CompareTo(y.Item1));
            }));

            // Average (and round) the class values of all neighbours closer than distance 4;
            // the list is already sorted by distance
            int sum = 0, count = 0;

            for (int i = 0; i < dist.Count; ++i)
            {
                if (dist[i].Item1 < 4)
                {
                    sum += dist[i].Item2;
                    count++;
                }
                else
                {
                    break;
                }
            }
            if (count == 0)
            {
                return(2);
            }
            if (count < m_instances.numInstances() / 70)
            {
                return(2);
            }
            return((int)Math.Round((double)sum / count));
        }
Beispiel #31
0
        private void result_Click(object sender, EventArgs e)
        {
            ArrayList algorithms = new ArrayList();

            algorithms.Add("Naive Bayes");
            algorithms.Add("K Nearest Neighbor");
            algorithms.Add("Decision Tree");
            algorithms.Add("Neural Network");
            algorithms.Add("Support Vector Machine");
            ArrayList successPercent = new ArrayList();
            double    res_Naive, res_KNN, res_NN, res_Tree, res_SVM = 0.0;
            string    nameOfAlgo = "";

            //NAIVE BAYES ALGORITHM
            weka.core.Instances insts = new weka.core.Instances(new java.io.FileReader(fileDirectory));

            //CREATIING DYNAMIC GRIDVIEW FOR ADDING NEW INSTANCE
            dataGridView1.ColumnCount   = 2;
            dataGridView1.RowCount      = insts.numAttributes();
            String[,] matrixOfInstances = new String[insts.numInstances(), insts.numAttributes()];



            for (int y = 0; y < insts.numAttributes() - 1; y++)
            {
                dataGridView1.Rows[y].Cells[0].Value = insts.attribute(y).name();
                if (insts.attribute(y).isNominal())
                {
                    //nominalDataValues.Add(insts.attribute(y).toString());
                    string   phrase = insts.attribute(y).toString();
                    string[] first  = phrase.Split('{');

                    string[] second = first[1].Split('}');

                    string[] attributeValues = second[0].Split(',');

                    DataGridViewComboBoxCell comboColumn = new DataGridViewComboBoxCell();

                    foreach (var a in attributeValues)
                    {
                        comboColumn.Items.Add(a);
                    }
                    dataGridView1.Rows[y].Cells[1] = comboColumn;
                }
            }

            insts.setClassIndex(insts.numAttributes() - 1);
            cl_Naive = new weka.classifiers.bayes.NaiveBayes();

            weka.filters.Filter myNominalData = new weka.filters.unsupervised.attribute.Discretize();
            myNominalData.setInputFormat(insts);
            insts = weka.filters.Filter.useFilter(insts, myNominalData);


            //randomize the order of the instances in the dataset.
            weka.filters.Filter myRandom = new weka.filters.unsupervised.instance.Randomize();
            myRandom.setInputFormat(insts);
            insts = weka.filters.Filter.useFilter(insts, myRandom);

            int trainSize = insts.numInstances() * percentSplit / 100;
            int testSize  = insts.numInstances() - trainSize;

            weka.core.Instances train = new weka.core.Instances(insts, 0, trainSize);

            cl_Naive.buildClassifier(train);

            string str = cl_Naive.toString();

            int numCorrect = 0;

            for (int i = trainSize; i < insts.numInstances(); i++)
            {
                weka.core.Instance currentInst    = insts.instance(i);
                double             predictedClass = cl_Naive.classifyInstance(currentInst);
                if (predictedClass == insts.instance(i).classValue())
                {
                    numCorrect++;
                }
            }
            res_Naive = (double)((double)numCorrect / (double)testSize * 100.0);
            successPercent.Add(res_Naive);
            //kNN

            weka.core.Instances insts2 = new weka.core.Instances(new java.io.FileReader(fileDirectory));

            insts2.setClassIndex(insts2.numAttributes() - 1);

            cl_Knn = new weka.classifiers.lazy.IBk();

            //Nominal to Binary
            weka.filters.Filter myBinaryData = new weka.filters.unsupervised.attribute.NominalToBinary();
            myBinaryData.setInputFormat(insts2);
            insts2 = weka.filters.Filter.useFilter(insts2, myBinaryData);

            //Normalization
            weka.filters.Filter myNormalized = new weka.filters.unsupervised.instance.Normalize();
            myNormalized.setInputFormat(insts2);
            insts2 = weka.filters.Filter.useFilter(insts2, myNormalized);

            //randomize the order of the instances in the dataset.
            weka.filters.Filter myRandom2 = new weka.filters.unsupervised.instance.Randomize();
            myRandom2.setInputFormat(insts2);
            insts2 = weka.filters.Filter.useFilter(insts2, myRandom2);

            int trainSize2 = insts2.numInstances() * percentSplit / 100;
            int testSize2  = insts2.numInstances() - trainSize2;

            weka.core.Instances train2 = new weka.core.Instances(insts2, 0, trainSize2);

            cl_Knn.buildClassifier(train2);

            string str2 = cl_Knn.toString();

            int numCorrect2 = 0;

            for (int i = trainSize2; i < insts2.numInstances(); i++)
            {
                weka.core.Instance currentInst2   = insts2.instance(i);
                double             predictedClass = cl_Knn.classifyInstance(currentInst2);
                if (predictedClass == insts2.instance(i).classValue())
                {
                    numCorrect2++;
                }
            }
            res_KNN = (double)((double)numCorrect2 / (double)testSize2 * 100.0);
            successPercent.Add(res_KNN);

            //Decision tree
            weka.core.Instances insts3 = new weka.core.Instances(new java.io.FileReader(fileDirectory));

            insts3.setClassIndex(insts3.numAttributes() - 1);

            cl_Tree = new weka.classifiers.trees.J48();



            weka.filters.Filter myNormalized2 = new weka.filters.unsupervised.instance.Normalize();
            myNormalized2.setInputFormat(insts3);
            insts3 = weka.filters.Filter.useFilter(insts3, myNormalized2);


            //randomize the order of the instances in the dataset.
            weka.filters.Filter myRandom3 = new weka.filters.unsupervised.instance.Randomize();
            myRandom3.setInputFormat(insts3);
            insts3 = weka.filters.Filter.useFilter(insts3, myRandom3);

            int trainSize3 = insts3.numInstances() * percentSplit / 100;
            int testSize3  = insts3.numInstances() - trainSize3;

            weka.core.Instances train3 = new weka.core.Instances(insts3, 0, trainSize3);

            cl_Tree.buildClassifier(train3);

            string str3 = cl_Tree.toString();

            int numCorrect3 = 0;

            for (int i = trainSize3; i < insts3.numInstances(); i++)
            {
                weka.core.Instance currentInst3   = insts3.instance(i);
                double             predictedClass = cl_Tree.classifyInstance(currentInst3);
                if (predictedClass == insts3.instance(i).classValue())
                {
                    numCorrect3++;
                }
            }
            res_Tree = (double)((double)numCorrect3 / (double)testSize3 * 100.0);
            successPercent.Add(res_Tree);

            //Neural Network
            weka.core.Instances insts4 = new weka.core.Instances(new java.io.FileReader(fileDirectory));

            insts4.setClassIndex(insts4.numAttributes() - 1);

            cl_NN = new weka.classifiers.functions.MultilayerPerceptron();

            //Nominal to Binary
            weka.filters.Filter myBinaryData2 = new weka.filters.unsupervised.attribute.NominalToBinary();
            myBinaryData2.setInputFormat(insts4);
            insts4 = weka.filters.Filter.useFilter(insts4, myBinaryData2);

            //Normalization
            weka.filters.Filter myNormalized3 = new weka.filters.unsupervised.instance.Normalize();
            myNormalized3.setInputFormat(insts4);
            insts4 = weka.filters.Filter.useFilter(insts4, myNormalized3);

            //randomize the order of the instances in the dataset.
            weka.filters.Filter myRandom4 = new weka.filters.unsupervised.instance.Randomize();
            myRandom4.setInputFormat(insts4);
            insts4 = weka.filters.Filter.useFilter(insts4, myRandom4);

            int trainSize4 = insts4.numInstances() * percentSplit / 100;
            int testSize4  = insts4.numInstances() - trainSize4;

            weka.core.Instances train4 = new weka.core.Instances(insts4, 0, trainSize4);

            cl_NN.buildClassifier(train4);

            string str4 = cl_NN.toString();

            int numCorrect4 = 0;

            for (int i = trainSize4; i < insts4.numInstances(); i++)
            {
                weka.core.Instance currentInst4   = insts4.instance(i);
                double             predictedClass = cl_NN.classifyInstance(currentInst4);
                if (predictedClass == insts4.instance(i).classValue())
                {
                    numCorrect4++;
                }
            }

            res_NN = (double)((double)numCorrect4 / (double)testSize4 * 100.0);
            successPercent.Add(res_NN);

            //SVM
            weka.core.Instances insts5 = new weka.core.Instances(new java.io.FileReader(fileDirectory));

            insts5.setClassIndex(insts5.numAttributes() - 1);

            cl_SVM = new weka.classifiers.functions.SMO();

            //Nominal to Binary
            weka.filters.Filter myBinaryData3 = new weka.filters.unsupervised.attribute.NominalToBinary();
            myBinaryData3.setInputFormat(insts5);
            insts5 = weka.filters.Filter.useFilter(insts5, myBinaryData3);

            //Normalization
            weka.filters.Filter myNormalized4 = new weka.filters.unsupervised.instance.Normalize();
            myNormalized4.setInputFormat(insts5);
            insts5 = weka.filters.Filter.useFilter(insts5, myNormalized4);

            //randomize the order of the instances in the dataset.
            weka.filters.Filter myRandom5 = new weka.filters.unsupervised.instance.Randomize();
            myRandom5.setInputFormat(insts5);
            insts5 = weka.filters.Filter.useFilter(insts5, myRandom5);

            int trainSize5 = insts5.numInstances() * percentSplit / 100;
            int testSize5  = insts5.numInstances() - trainSize5;

            weka.core.Instances train5 = new weka.core.Instances(insts5, 0, trainSize5);

            cl_SVM.buildClassifier(train5);

            string str5 = cl_SVM.toString();

            int numCorrect5 = 0;

            for (int i = trainSize5; i < insts5.numInstances(); i++)
            {
                weka.core.Instance currentInst5   = insts5.instance(i);
                double             predictedClass = cl_SVM.classifyInstance(currentInst5);
                if (predictedClass == insts5.instance(i).classValue())
                {
                    numCorrect5++;
                }
            }
            res_SVM = (double)((double)numCorrect5 / (double)testSize5 * 100.0);
            successPercent.Add(res_SVM);


            for (int i = 0; i < successPercent.Count; i++)
            {
                if ((double)successPercent[i] > max)
                {
                    max   = (double)successPercent[i];
                    count = i + 1;
                }
            }
            if (count > 0)
            {
                nameOfAlgo = (string)algorithms[count - 1];
            }

            textBox1.Text = nameOfAlgo + " is the most successful algorithm for this data set (" + max + "%).\n";
        }
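        // Hedged refactoring sketch, not in the original source: the five near-identical blocks in
        // result_Click above differ only in the classifier and the preprocessing filters, so the
        // percent-split evaluation itself could be factored into one helper. The name
        // EvaluateWithPercentSplit is made up; percentSplit is assumed to be the same field used above.
        private double EvaluateWithPercentSplit(weka.core.Instances data, weka.classifiers.Classifier classifier)
        {
            // Randomize, then split into train/test by the configured percentage
            weka.filters.Filter randomize = new weka.filters.unsupervised.instance.Randomize();
            randomize.setInputFormat(data);
            data = weka.filters.Filter.useFilter(data, randomize);

            int trainSize = data.numInstances() * percentSplit / 100;
            int testSize  = data.numInstances() - trainSize;
            weka.core.Instances train = new weka.core.Instances(data, 0, trainSize);

            classifier.buildClassifier(train);

            // Count correct predictions on the held-out instances
            int numCorrect = 0;
            for (int i = trainSize; i < data.numInstances(); i++)
            {
                if (classifier.classifyInstance(data.instance(i)) == data.instance(i).classValue())
                {
                    numCorrect++;
                }
            }
            return (double)numCorrect / testSize * 100.0;
        }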
Beispiel #32
0
        private void button1_Click(object sender, EventArgs e)
        {
            OpenFileDialog file = new OpenFileDialog();

            if (file.ShowDialog() == DialogResult.OK)
            {
                string filename = file.FileName;
                string filee    = Path.GetFileName(filename);
                bool   attributeType;
                string attributeName      = " ";
                int    numAttributeValue  = 0;
                string attributeValueName = " ";

                textBox1.Text = filee + " chosen successfully!";

                ///////Decision Tree
                weka.core.Instances insts = new weka.core.Instances(new java.io.FileReader(filename));


                insts.setClassIndex(insts.numAttributes() - 1);

                //find nominal or numeric attributes and create dropbox or textbox
                int numofAttributes = insts.numAttributes() - 1;
                for (int i = 0; i < numofAttributes; i++)
                {
                    attributeType = insts.attribute(i).isNumeric();
                    attributeName = insts.attribute(i).name();
                    dataGridView1.Rows.Add(attributeName);
                    if (attributeType == true)
                    {
                    }
                    else
                    {
                        numAttributeValue = insts.attribute(i).numValues();
                        string[] name = new string[numAttributeValue];
                        for (int j = 0; j < numAttributeValue; j++)
                        {
                            attributeValueName = insts.attribute(i).value(j);
                            name[j]           += attributeValueName;
                        }
                        DataGridViewComboBoxCell combo = new DataGridViewComboBoxCell();
                        combo.DataSource = name.ToList();
                        dataGridView1.Rows[i].Cells[1] = combo;
                    }
                }

                cl = new weka.classifiers.trees.J48();

                textBox2.Text = "Performing " + percentSplit + "% split evaluation.";

                //filling missing values
                weka.filters.Filter missingval = new weka.filters.unsupervised.attribute.ReplaceMissingValues();
                missingval.setInputFormat(insts);
                insts = weka.filters.Filter.useFilter(insts, missingval);

                weka.filters.Filter myNormalized = new weka.filters.unsupervised.instance.Normalize();
                myNormalized.setInputFormat(insts);
                insts = weka.filters.Filter.useFilter(insts, myNormalized);


                //randomize the order of the instances in the dataset.
                weka.filters.Filter myRandom = new weka.filters.unsupervised.instance.Randomize();
                myRandom.setInputFormat(insts);
                insts = weka.filters.Filter.useFilter(insts, myRandom);

                int trainSize             = insts.numInstances() * percentSplit / 100;
                int testSize              = insts.numInstances() - trainSize;
                weka.core.Instances train = new weka.core.Instances(insts, 0, trainSize);

                cl.buildClassifier(train);

                string str = cl.toString();

                int numCorrect = 0;
                for (int i = trainSize; i < insts.numInstances(); i++)
                {
                    weka.core.Instance currentInst    = insts.instance(i);
                    double             predictedClass = cl.classifyInstance(currentInst);
                    if (predictedClass == insts.instance(i).classValue())
                    {
                        numCorrect++;
                    }
                }
                textBox3.Text = numCorrect + " out of " + testSize + " correct (" +
                                (double)((double)numCorrect / (double)testSize * 100.0) + "%)";



                //////////Naive Bayes

                //read the data file
                weka.core.Instances insts2 = new weka.core.Instances(new java.io.FileReader(filename));
                insts2.setClassIndex(insts2.numAttributes() - 1);

                //naive bayes
                cl2 = new weka.classifiers.bayes.NaiveBayes();


                //filling missing values
                weka.filters.Filter missingval2 = new weka.filters.unsupervised.attribute.ReplaceMissingValues();
                missingval2.setInputFormat(insts2);
                insts2 = weka.filters.Filter.useFilter(insts2, missingval2);

                //for naive bayes
                weka.filters.Filter discrete2 = new weka.filters.unsupervised.attribute.Discretize();
                discrete2.setInputFormat(insts2);
                insts2 = weka.filters.Filter.useFilter(insts2, discrete2);

                //randomize the order of the instances in the dataset (shared step for all classifiers)
                weka.filters.Filter myRandom2 = new weka.filters.unsupervised.instance.Randomize();
                myRandom2.setInputFormat(insts2);
                insts2 = weka.filters.Filter.useFilter(insts2, myRandom2);

                //shared train/test split
                int trainSize2             = insts2.numInstances() * percentSplit / 100;
                int testSize2              = insts2.numInstances() - trainSize2;
                weka.core.Instances train2 = new weka.core.Instances(insts2, 0, trainSize2);

                cl2.buildClassifier(train2);

                string str2 = cl2.toString();

                int numCorrect2 = 0;
                for (int i = trainSize2; i < insts2.numInstances(); i++)
                {
                    weka.core.Instance currentInst2    = insts2.instance(i);
                    double             predictedClass2 = cl2.classifyInstance(currentInst2);
                    if (predictedClass2 == insts2.instance(i).classValue())
                    {
                        numCorrect2++;
                    }
                }
                textBox4.Text = numCorrect2 + " out of " + testSize2 + " correct (" +
                                (double)((double)numCorrect2 / (double)testSize2 * 100.0) + "%)";


                /////////K-Nearest Neighbour

                //read the data file
                weka.core.Instances insts3 = new weka.core.Instances(new java.io.FileReader(filename));
                insts3.setClassIndex(insts3.numAttributes() - 1);

                cl3 = new weka.classifiers.lazy.IBk();


                //filling missing values
                weka.filters.Filter missingval3 = new weka.filters.unsupervised.attribute.ReplaceMissingValues();
                missingval3.setInputFormat(insts3);
                insts3 = weka.filters.Filter.useFilter(insts3, missingval3);

                //Convert to dummy attribute knn,svm,neural network
                weka.filters.Filter dummy3 = new weka.filters.unsupervised.attribute.NominalToBinary();
                dummy3.setInputFormat(insts3);
                insts3 = weka.filters.Filter.useFilter(insts3, dummy3);

                //normalize numeric attribute
                weka.filters.Filter myNormalized3 = new weka.filters.unsupervised.instance.Normalize();
                myNormalized3.setInputFormat(insts3);
                insts3 = weka.filters.Filter.useFilter(insts3, myNormalized3);

                //randomize the order of the instances in the dataset.
                weka.filters.Filter myRandom3 = new weka.filters.unsupervised.instance.Randomize();
                myRandom3.setInputFormat(insts3);
                insts3 = weka.filters.Filter.useFilter(insts3, myRandom3);

                int trainSize3             = insts3.numInstances() * percentSplit / 100;
                int testSize3              = insts3.numInstances() - trainSize3;
                weka.core.Instances train3 = new weka.core.Instances(insts3, 0, trainSize3);

                cl3.buildClassifier(train3);

                string str3 = cl3.toString();

                int numCorrect3 = 0;
                for (int i = trainSize3; i < insts3.numInstances(); i++)
                {
                    weka.core.Instance currentInst3    = insts3.instance(i);
                    double             predictedClass3 = cl3.classifyInstance(currentInst3);
                    if (predictedClass3 == insts3.instance(i).classValue())
                    {
                        numCorrect3++;
                    }
                }
                textBox5.Text = numCorrect3 + " out of " + testSize3 + " correct (" +
                                (double)((double)numCorrect3 / (double)testSize3 * 100.0) + "%)";

                //////////Artificial neural network
                //read the data file
                weka.core.Instances insts4 = new weka.core.Instances(new java.io.FileReader(filename));
                insts4.setClassIndex(insts4.numAttributes() - 1);

                cl4 = new weka.classifiers.functions.MultilayerPerceptron();


                //filling missing values
                weka.filters.Filter missingval4 = new weka.filters.unsupervised.attribute.ReplaceMissingValues();
                missingval4.setInputFormat(insts4);
                insts4 = weka.filters.Filter.useFilter(insts4, missingval4);

                //Convert to dummy attribute
                weka.filters.Filter dummy4 = new weka.filters.unsupervised.attribute.NominalToBinary();
                dummy4.setInputFormat(insts4);
                insts4 = weka.filters.Filter.useFilter(insts4, dummy4);

                //normalize numeric attribute
                weka.filters.Filter myNormalized4 = new weka.filters.unsupervised.instance.Normalize();
                myNormalized4.setInputFormat(insts4);
                insts4 = weka.filters.Filter.useFilter(insts4, myNormalized4);

                //randomize the order of the instances in the dataset.
                weka.filters.Filter myRandom4 = new weka.filters.unsupervised.instance.Randomize();
                myRandom4.setInputFormat(insts4);
                insts4 = weka.filters.Filter.useFilter(insts4, myRandom4);

                int trainSize4             = insts4.numInstances() * percentSplit / 100;
                int testSize4              = insts4.numInstances() - trainSize4;
                weka.core.Instances train4 = new weka.core.Instances(insts4, 0, trainSize4);

                cl4.buildClassifier(train4);

                string str4 = cl4.toString();

                int numCorrect4 = 0;
                for (int i = trainSize4; i < insts4.numInstances(); i++)
                {
                    weka.core.Instance currentInst4    = insts4.instance(i);
                    double             predictedClass4 = cl4.classifyInstance(currentInst4);
                    if (predictedClass4 == insts4.instance(i).classValue())
                    {
                        numCorrect4++;
                    }
                }

                textBox6.Text = numCorrect4 + " out of " + testSize4 + " correct (" +
                                (double)((double)numCorrect4 / (double)testSize4 * 100.0) + "%)";



                ///////Support Vector Machine
                // read the data file
                weka.core.Instances insts5 = new weka.core.Instances(new java.io.FileReader(filename));
                insts5.setClassIndex(insts5.numAttributes() - 1);

                cl5 = new weka.classifiers.functions.SMO();


                //filling missing values
                weka.filters.Filter missingval5 = new weka.filters.unsupervised.attribute.ReplaceMissingValues();
                missingval5.setInputFormat(insts5);
                insts5 = weka.filters.Filter.useFilter(insts5, missingval5);

                //Convert to dummy attribute
                weka.filters.Filter dummy5 = new weka.filters.unsupervised.attribute.NominalToBinary();
                dummy5.setInputFormat(insts5);
                insts5 = weka.filters.Filter.useFilter(insts5, dummy5);

                //normalize numeric attribute
                weka.filters.Filter myNormalized5 = new weka.filters.unsupervised.instance.Normalize();
                myNormalized5.setInputFormat(insts5);
                insts5 = weka.filters.Filter.useFilter(insts5, myNormalized5);

                //randomize the order of the instances in the dataset.
                weka.filters.Filter myRandom5 = new weka.filters.unsupervised.instance.Randomize();
                myRandom5.setInputFormat(insts5);
                insts5 = weka.filters.Filter.useFilter(insts5, myRandom5);

                int trainSize5             = insts5.numInstances() * percentSplit / 100;
                int testSize5              = insts5.numInstances() - trainSize5;
                weka.core.Instances train5 = new weka.core.Instances(insts5, 0, trainSize5);

                cl5.buildClassifier(train5);

                string str5 = cl5.toString();

                int numCorrect5 = 0;
                for (int i = trainSize5; i < insts5.numInstances(); i++)
                {
                    weka.core.Instance currentInst5    = insts5.instance(i);
                    double             predictedClass5 = cl5.classifyInstance(currentInst5);
                    if (predictedClass5 == insts5.instance(i).classValue())
                    {
                        numCorrect5++;
                    }
                }

                textBox7.Text = numCorrect5 + " out of " + testSize5 + " correct (" +
                                (double)((double)numCorrect5 / (double)testSize5 * 100.0) + "%)";



                string result1 = textBox3.Text;
                string output1 = result1.Split('(', ')')[1];
                output1 = output1.Remove(output1.Length - 1);
                double r1 = Convert.ToDouble(output1);

                string result2 = textBox4.Text;
                string output2 = result2.Split('(', ')')[1];
                output2 = output2.Remove(output2.Length - 1);
                double r2 = Convert.ToDouble(output2);

                string result3 = textBox5.Text;
                string output3 = result3.Split('(', ')')[1];
                output3 = output3.Remove(output3.Length - 1);
                double r3 = Convert.ToDouble(output3);

                string result4 = textBox6.Text;
                string output4 = result4.Split('(', ')')[1];
                output4 = output4.Remove(output4.Length - 1);
                double r4 = Convert.ToDouble(output4);

                string result5 = textBox7.Text;
                string output5 = result5.Split('(', ')')[1];
                output5 = output5.Remove(output5.Length - 1);
                double r5 = Convert.ToDouble(output5);


                double[] max_array = new double[] { r1, r2, r3, r4, r5 };

                double max = max_array.Max();
                if (r1 == max)
                {
                    textBox8.Text = "Best Algorithm is Decision Tree Algorithm ";
                }
                else if (r2 == max)
                {
                    textBox8.Text = "Best Algorithm is Naive Bayes Algorithm ";
                }
                else if (r3 == max)
                {
                    textBox8.Text = "Best Algorithm is K-Nearest Neighbour Algorithm ";
                }
                else if (r4 == max)
                {
                    textBox8.Text = "Best Algorithm is Artificial Neural Network Algorithm ";
                }
                else if (r5 == max)
                {
                    textBox8.Text = "Best Algorithm is Support Vector Machine Algorithm ";
                }
            }
        }
    // Test the classification result of each map that a user played,
    // with the data available as if they were playing through it
    public static void classifyTest(String dataString, String playerID)
    {
        String results = "";

        try {
            java.io.StringReader   stringReader = new java.io.StringReader(dataString);
            java.io.BufferedReader buffReader   = new java.io.BufferedReader(stringReader);

            /* NOTE THAT FOR NAIVE BAYES ALL WEIGHTS CAN BE = 1*/
            //weka.core.converters.ConverterUtils.DataSource source = new weka.core.converters.ConverterUtils.DataSource("iris.arff");
            weka.core.Instances data = new weka.core.Instances(buffReader);             //source.getDataSet();
            // setting class attribute if the data format does not provide this information
            // For example, the XRFF format saves the class attribute information as well
            if (data.classIndex() == -1)
            {
                data.setClassIndex(data.numAttributes() - 1);
            }

            weka.classifiers.Classifier cl;
            for (int i = 3; i < data.numInstances(); i++)
            {
                cl = new weka.classifiers.bayes.NaiveBayes();
                //cl = new weka.classifiers.trees.J48();
                //cl = new weka.classifiers.lazy.IB1();
                //cl = new weka.classifiers.functions.MultilayerPerceptron();
                //((weka.classifiers.functions.MultilayerPerceptron)cl).setHiddenLayers("12"); // only valid with the MultilayerPerceptron line above; with NaiveBayes this cast would throw

                weka.core.Instances subset = new weka.core.Instances(data, 0, i);
                cl.buildClassifier(subset);

                weka.classifiers.Evaluation eval = new weka.classifiers.Evaluation(subset);
                eval.crossValidateModel(cl, subset, 3, new java.util.Random(1));
                results = results + eval.pctCorrect();                 // For accuracy measurement
                /* For Mathews Correlation Coefficient */
                //double TP = eval.numTruePositives(1);
                //double FP = eval.numFalsePositives(1);
                //double TN = eval.numTrueNegatives(1);
                //double FN = eval.numFalseNegatives(1);
                //double correlationCoeff = ((TP*TN)-(FP*FN))/Math.Sqrt((TP+FP)*(TP+FN)*(TN+FP)*(TN+FN));
                //results = results + correlationCoeff;
                if (i != data.numInstances() - 1)
                {
                    results = results + ", ";
                }
                if (i == data.numInstances() - 1)
                {
                    Debug.Log("Player: " + playerID + ", Num Maps: " + data.numInstances() + ", AUC: " + eval.areaUnderROC(1));
                }
            }
        } catch (java.lang.Exception ex)
        {
            Debug.LogError(ex.getMessage());
        }
        // Write values to file for a matlab read
        // For accuracy
        StreamWriter writer = new StreamWriter("DataForMatlab/" + playerID + "_CrossFoldValidations_NeuralNet.txt");

        //StreamWriter writer = new StreamWriter("DataForMatlab/"+playerID+"_CrossFoldCorrCoeff.txt"); // For mathews cc
        writer.WriteLine(results);
        writer.Close();
        Debug.Log(playerID + " has been written to file");
    }
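    // Hedged usage sketch, not in the original source: classifyTest above expects the full ARFF
    // content as a single string plus a player id, and writes the per-map cross-validation
    // accuracies to DataForMatlab/<playerID>_CrossFoldValidations_NeuralNet.txt. The ARFF path
    // below is a made-up example location.
    public static void RunClassifyTestForPlayer(String playerID)
    {
        string arffPath   = "PlayerLogs/" + playerID + ".arff";        // hypothetical location
        string dataString = System.IO.File.ReadAllText(arffPath);      // whole ARFF file as one string
        classifyTest(dataString, playerID);                            // assumes the same enclosing class
    }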
Beispiel #34
0
        private void Classify(string path)
        {
            readyToTest = false; // initialize flag

            // Try reading file, if failed exit function
            insts = ReadFile(path);
            if (insts == null)
            {
                // Error occured reading file, display error message
                MessageBox.Show("Instances are null!", "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
                return;
            }

            var form = Form.ActiveForm as Form1; // get the current form object

            // Reset UI and lists
            succesRates.Clear();
            classifiers.Clear();
            form.inputPanel.Controls.Clear();
            inputObjects.Clear();
            form.textMostSuccessful.Text = "";
            form.testResult.Text         = "";

            // Place attribute inputs on UI, max 18, numeric and nominal
            int offsetV = 60;
            int offsetH = 10;
            int width   = 75;
            int height  = 30;

            for (int i = 0; i < insts.numAttributes() - 1; i++)
            {
                // Create and place label
                Label label = new Label();
                label.Width    = width;
                label.Height   = height;
                label.Text     = insts.attribute(i).name();
                label.Parent   = form.inputPanel;
                label.Location = new Point((width * (i % 8)) + offsetH, (height * (i / 8)) + (offsetV * (i / 8)));

                // NumericUpDown for numeric and ComboBox for nominal values
                if (insts.attribute(i).isNumeric())
                {
                    NumericUpDown nud = new NumericUpDown();
                    nud.Width    = width - 10;
                    nud.Height   = height;
                    nud.Parent   = form.inputPanel;
                    nud.Location = new Point((width * (i % 8)) + offsetH, (height * (i / 8)) + (offsetV * (i / 8)) + height);
                    inputObjects.Add(new UserInput(nud));
                }
                else
                {
                    string[] values   = insts.attribute(i).toString().Split('{', '}')[1].Split(',');
                    ComboBox comboBox = new ComboBox();
                    comboBox.DataSource = values;
                    comboBox.Width      = width - 10;
                    comboBox.Height     = height;
                    comboBox.Parent     = form.inputPanel;
                    comboBox.Location   = new Point((width * (i % 8)) + offsetH, (height * (i / 8)) + (offsetV * (i / 8)) + height);
                    inputObjects.Add(new UserInput(comboBox));
                }
            }

            // Set train and test sizes
            trainSize = insts.numInstances() * percentSplit / 100;
            testSize  = insts.numInstances() - trainSize;

            // Set target attribute
            insts.setClassIndex(insts.numAttributes() - 1);

            // Randomize
            weka.filters.Filter rndFilter = new weka.filters.unsupervised.instance.Randomize();
            rndFilter.setInputFormat(insts);
            insts = weka.filters.Filter.useFilter(insts, rndFilter);


            // Start threads for each method
            Thread t_SuccessNb = new Thread(() => CalculateSuccessForNb(insts));

            t_SuccessNb.Start();

            Thread t_SuccessKn = new Thread(() => CalculateSuccessForKn(insts));

            t_SuccessKn.Start();

            Thread t_SuccessDt = new Thread(() => CalculateSuccessForDt(insts));

            t_SuccessDt.Start();

            Thread t_SuccessAnn = new Thread(() => CalculateSuccessForAnn(insts));

            t_SuccessAnn.Start();

            Thread t_SuccessSvm = new Thread(() => CalculateSuccessForSvm(insts));

            t_SuccessSvm.Start();

            // Wait for threads
            t_SuccessNb.Join();
            t_SuccessKn.Join();
            t_SuccessDt.Join();
            t_SuccessAnn.Join();
            t_SuccessSvm.Join();

            // Find out which algorithm has the best success rate
            foreach (var item in succesRates)
            {
                if (highestSuccessRate.Equals(default(KeyValuePair <Classifier, double>)) || highestSuccessRate.Value < item.Value)
                {
                    highestSuccessRate = item;
                }
            }
            form.textMostSuccessful.Text = "Most successful algorithm is " + highestSuccessRate.Key + " and it will be used for testing.";
            readyToTest = true; // switch flag
        }
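        // Hedged sketch, since the real CalculateSuccessForNb/Kn/Dt/Ann/Svm bodies are not shown in
        // this listing: one plausible shape of the Naive Bayes worker, following the same
        // Discretize -> split -> train -> count-correct pattern used elsewhere in this file. The real
        // methods presumably also record the classifier and its rate in the classifiers and
        // succesRates collections; that bookkeeping is omitted here because those types are not shown.
        private double CalculateSuccessForNbSketch(weka.core.Instances data)
        {
            weka.core.Instances copy = new weka.core.Instances(data); // work on a copy, the threads share 'insts'

            // Naive Bayes is trained on discretized attributes, as in the other examples
            weka.filters.Filter discretize = new weka.filters.unsupervised.attribute.Discretize();
            discretize.setInputFormat(copy);
            copy = weka.filters.Filter.useFilter(copy, discretize);

            weka.classifiers.Classifier nb = new weka.classifiers.bayes.NaiveBayes();
            weka.core.Instances train = new weka.core.Instances(copy, 0, trainSize); // trainSize/testSize set in Classify above
            nb.buildClassifier(train);

            int numCorrect = 0;
            for (int i = trainSize; i < copy.numInstances(); i++)
            {
                if (nb.classifyInstance(copy.instance(i)) == copy.instance(i).classValue())
                {
                    numCorrect++;
                }
            }
            return (double)numCorrect / testSize * 100.0;
        }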
Beispiel #35
0
		/// <summary> Gets the index of the instance with the closest threshold value to the
		/// desired target
		/// 
		/// </summary>
		/// <param name="tcurve">a set of instances that have been generated by this class
		/// </param>
		/// <param name="threshold">the target threshold
		/// </param>
		/// <returns> the index of the instance that has threshold closest to
		/// the target, or -1 if this could not be found (i.e. no data, or
		/// bad threshold target)
		/// </returns>
		public static int getThresholdInstance(Instances tcurve, double threshold)
		{
			
			if (!RELATION_NAME.Equals(tcurve.relationName()) || (tcurve.numInstances() == 0) || (threshold < 0) || (threshold > 1.0))
			{
				return - 1;
			}
			if (tcurve.numInstances() == 1)
			{
				return 0;
			}
			double[] tvals = tcurve.attributeToDoubleArray(tcurve.numAttributes() - 1);
			int[] sorted = Utils.sort(tvals);
			return binarySearch(sorted, tvals, threshold);
		}
Beispiel #36
0
    // Test the classification result of each map that a user played,
    // with the data available as if they were playing through it
    public static void classifyTest(String dataString, String playerID)
    {
        try {
            java.io.StringReader   stringReader = new java.io.StringReader(dataString);
            java.io.BufferedReader buffReader   = new java.io.BufferedReader(stringReader);

            /* NOTE THAT FOR NAIVE BAYES ALL WEIGHTS CAN BE = 1*/
            //weka.core.converters.ConverterUtils.DataSource source = new weka.core.converters.ConverterUtils.DataSource("iris.arff");
            weka.core.Instances thisData = new weka.core.Instances(buffReader);             //source.getDataSet();
            if (thisData.classIndex() == -1)
            {
                thisData.setClassIndex(thisData.numAttributes() - 1);
            }

            weka.core.Instances thisUniqueData = new weka.core.Instances(thisData);
            if (thisUniqueData.classIndex() == -1)
            {
                thisUniqueData.setClassIndex(thisUniqueData.numAttributes() - 1);
            }
            thisUniqueData.delete();

            if (allUniqueData == null)
            {
                allUniqueData = new weka.core.Instances(thisData);
                if (allUniqueData.classIndex() == -1)
                {
                    allUniqueData.setClassIndex(allUniqueData.numAttributes() - 1);
                }
                allUniqueData.delete();
            }

            weka.core.InstanceComparator com = new weka.core.InstanceComparator(false);

            for (int i = 0; i < thisData.numInstances(); i++)
            {
                bool dup = false;
                for (int j = 0; j < allUniqueData.numInstances(); j++)
                {
                    if (com.compare(thisData.instance(i), allUniqueData.instance(j)) == 0)
                    {
                        Debug.Log("Duplicate found!");
                        dup = true;
                        break;
                    }
                }
                if (!dup)
                {
                    allUniqueData.add(thisData.instance(i));
                }
                else
                {
                    dupInstances++;
                }
            }

            for (int i = 0; i < thisData.numInstances(); i++)
            {
                bool dup = false;
                for (int j = 0; j < thisUniqueData.numInstances(); j++)
                {
                    if (com.compare(thisData.instance(i), thisUniqueData.instance(j)) == 0)
                    {
                        Debug.Log("Duplicate found!");
                        dup = true;
                        break;
                    }
                }
                if (!dup)
                {
                    thisUniqueData.add(thisData.instance(i));
                }
                else
                {
                    dupInstancesSamePlayer++;
                }
            }


            //Debug.Log("All Data Instance Count = " + thisData.numInstances());
            //Debug.Log("Unique Data Instance Count = " + thisUniqueData.numInstances());
            //Debug.Log("Done!");
        } catch (java.lang.Exception ex)
        {
            Debug.LogError(ex.getMessage());
        }
    }
Beispiel #37
0
        private void btnLoadStep_Click(object sender, EventArgs e)
        {
            if (m_loadStepInstances == null)
            {
                using (OpenFileDialog d = new OpenFileDialog())
                {
                    d.Filter = "Arff File|*.arff";
                    if (d.ShowDialog() == System.Windows.Forms.DialogResult.OK)
                    {
                        m_loadStepInstances = new weka.core.Instances(new java.io.BufferedReader(new java.io.FileReader(d.FileName)));
                        m_loadStepInstances.setClassIndex(m_loadStepInstances.numAttributes() - 1);

                        clear_all();
                    }
                }
            }
            else
            {
                for (int i = m_loadStepIdx; i < m_loadStepInstances.numInstances(); ++i)
                {
                    var ins = m_loadStepInstances.instance(i);
                    var p = new valuePoint(ins.value(0), ins.value(1), (int)ins.classValue());
                    if (p.x < 0 || p.x >= 1 || p.y < 0 || p.y >= 1)
                        continue;

                    point_list.Add(p);

                    draw_point(p);

                    m_loadStepIdx = i + 1;
                    if (i % 1000 == 0)
                    {
                        break;
                    }
                }

                pictureBox1.Invalidate();
                if (m_loadStepIdx == m_loadStepInstances.numInstances())
                {
                    m_loadStepIdx = 0;
                    m_loadStepInstances = null;
                }
            }
        }
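
The step loader above parses the whole ARFF file up front and then replays it in blocks of 1,000 points per click. For very large files, Weka can also read ARFF data incrementally; a minimal sketch under that assumption (helper name and usage hypothetical), using weka.core.converters.ArffLoader.ArffReader:

        // Sketch: read an ARFF file one instance at a time instead of materialising it all at once.
        private static void ReadArffIncrementally(string path)
        {
            var reader = new java.io.BufferedReader(new java.io.FileReader(path));
            var arff = new weka.core.converters.ArffLoader.ArffReader(reader, 1000);  // header + row buffer
            weka.core.Instances structure = arff.getStructure();
            structure.setClassIndex(structure.numAttributes() - 1);

            weka.core.Instance inst;
            while ((inst = arff.readInstance(structure)) != null)
            {
                // handle one instance at a time, e.g. convert it to a valuePoint and draw it
            }
            reader.close();
        }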
        private void btnBrowse_Click(object sender, EventArgs e)
        {
            panel1.Controls.Clear();
            panel2.Controls.Clear();
            testValues          = new List <object>();
            classes             = new List <string>();
            algoritmName        = String.Empty;
            SuccessfulAlgorithm = new List <AlgorithmModel>();
            labelNames          = new List <string>();
            DialogResult result = openFileDialog.ShowDialog();

            if (result == DialogResult.OK)
            {
                txtPath.Text = openFileDialog.FileName;
            }


            staticInsts = new weka.core.Instances(new java.io.FileReader(txtPath.Text));

            for (int i = 0; i < staticInsts.attribute(staticInsts.numAttributes() - 1).numValues(); i++)
            {
                classes.Add(staticInsts.attribute(staticInsts.numAttributes() - 1).value(i));
            }

            AlgoritmAccurancy(staticInsts, new weka.classifiers.bayes.NaiveBayes(), "Naive Bayes", true);
            AlgoritmAccurancy(staticInsts, new weka.classifiers.lazy.IBk(3), "KNN with k = 3", false);
            AlgoritmAccurancy(staticInsts, new weka.classifiers.trees.RandomForest(), "Random Forest");
            AlgoritmAccurancy(staticInsts, new weka.classifiers.trees.RandomTree(), "Random Tree");
            AlgoritmAccurancy(staticInsts, new weka.classifiers.trees.J48(), "J48");

            pointY = 20;
            pointX = 20;

            for (int i = 0; i < staticInsts.numAttributes() - 1; i++)
            {
                if (staticInsts.attribute(i).numValues() == 0)
                {
                    pointX = 0;
                    string attName       = staticInsts.attribute(i).name(); // numeric attribute: show a TextBox for free-form input
                    Label  attributeName = new Label();
                    attributeName.Size = new Size(70, 20);
                    attributeName.Text = attName + "\t :";
                    labelNames.Add(attributeName.Text);
                    attributeName.Location = new Point(pointX, pointY);
                    panel1.Controls.Add(attributeName);

                    pointX += 70;
                    TextBox txtValue = new TextBox();
                    txtValue.Location = new Point(pointX, pointY);
                    panel1.Controls.Add(txtValue);
                    panel1.Show();
                    pointY += 30;
                    testValues.Add(txtValue);
                }
                else
                {
                    pointX = 0;
                    string attName       = staticInsts.attribute(i).name(); // nominal attribute: show a drop-down of its values
                    Label  attributeName = new Label();
                    attributeName.Size = new Size(70, 20);
                    attributeName.Text = attName + "\t :";
                    labelNames.Add(attributeName.Text);
                    attributeName.Location = new Point(pointX, pointY);
                    panel1.Controls.Add(attributeName);
                    pointX += 70;

                    ComboBox cb = new ComboBox();
                    cb.DropDownStyle = ComboBoxStyle.DropDownList;
                    cb.Location      = new Point(pointX, pointY);
                    List <string> items = new List <string>();
                    for (int j = 0; j < staticInsts.attribute(i).numValues(); j++)
                    {
                        items.Add(staticInsts.attribute(i).value(j).ToString()); // put the incoming nominal values into the drop-down
                    }
                    cb.Items.AddRange(items.ToArray());
                    cb.SelectedIndex = 0;
                    panel1.Controls.Add(cb);
                    panel1.Show();
                    pointY += 30;
                    testValues.Add(cb);
                }
            }

            double maxRatio = Double.MinValue;

            foreach (var item in SuccessfulAlgorithm)
            {
                if (item.SuccessRatio > maxRatio)
                {
                    maxRatio     = item.SuccessRatio;
                    algoritmName = item.AlgorithName;
                    predictor    = item.TrainModel.classifier;
                }
            }
            string _maxRatio = string.Format("{0:0.00}", maxRatio);

            lblSuccessulAlgorithm.Text = "The most successful algorithm is " + algoritmName + " and its accuracy is " + _maxRatio + "%";

            Button btn = new Button();

            btn.Click    += Btn_Click;
            btn.Location  = new Point(pointX, pointY);
            btn.Size      = new Size(80, 20);
            btn.Text      = "DISCOVER";
            btn.BackColor = Color.White;
            panel1.Controls.Add(btn);
            panel1.Show();
        }
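
AlgoritmAccurancy is called above but its body is not part of this snippet, so the following is only a hedged sketch of what such an accuracy helper commonly looks like in these Weka examples: randomize, train on the first two thirds, test on the rest, and return percent correct. The real method (and its extra bool parameters) may differ.

        // Hypothetical sketch, not the original AlgoritmAccurancy: hold-out accuracy in percent.
        private static double HoldOutAccuracy(weka.core.Instances insts, weka.classifiers.Classifier cls)
        {
            insts.setClassIndex(insts.numAttributes() - 1);
            insts.randomize(new java.util.Random(1));              // fixed seed for repeatability

            int trainSize = insts.numInstances() * 2 / 3;
            weka.core.Instances train = new weka.core.Instances(insts, 0, trainSize);
            cls.buildClassifier(train);

            int correct = 0;
            for (int i = trainSize; i < insts.numInstances(); i++)
            {
                double predicted = cls.classifyInstance(insts.instance(i));
                if (predicted == insts.instance(i).classValue())
                {
                    correct++;
                }
            }
            return 100.0 * correct / (insts.numInstances() - trainSize);
        }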
Beispiel #39
0
        private weka.core.Instances CreateEmptyInstances()
        {
            var atts = new java.util.ArrayList();
            atts.add(new weka.core.Attribute("x"));
            atts.add(new weka.core.Attribute("y"));

            if (!ckbClassIsNominal.Checked)
            {
                atts.add(new weka.core.Attribute("v"));
            }
            else
            {
                // - nominal
                var attVals = new java.util.ArrayList();
                //for(int i=0; i<MAXCLASSNUM; ++i)
                //    attVals.add(i.ToString());
                attVals.add("0");
                attVals.add("1");
                atts.add(new weka.core.Attribute("v", attVals));
            }

            weka.core.Instances data = new weka.core.Instances("MyRelation", atts, 0);
            data.setClassIndex(data.numAttributes() - 1);

            return data;
        }
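
A minimal usage sketch for the header returned by CreateEmptyInstances (values hypothetical): adding one labelled point with weka.core.DenseInstance, which matches the Weka 3.7+ java.util.ArrayList attribute construction used above.

        // Sketch: fill the empty dataset with a single (x, y, v) point.
        private void AddSamplePoint(weka.core.Instances data)
        {
            double[] vals = new double[data.numAttributes()];
            vals[0] = 0.25;   // x (hypothetical value)
            vals[1] = 0.75;   // y (hypothetical value)
            if (data.classAttribute().isNominal())
            {
                vals[2] = data.classAttribute().indexOfValue("1");  // nominal class label "1"
            }
            else
            {
                vals[2] = 1.0;                                      // numeric class value
            }
            data.add(new weka.core.DenseInstance(1.0, vals));       // weight 1.0
        }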
Beispiel #40
0
        private void button_Discover_Click(object sender, EventArgs e)
        {
            String       s_newInstance = "";
            StreamReader sr            = new StreamReader(fileDirectory);
            StreamWriter sw            = new StreamWriter(@"test.arff", true);
            String       newDirectory  = "test.arff"; // for the algorithms below
            string       line          = "";
            string       comp          = "@data";
            string       comp2         = "@DATA";

            line = sr.ReadLine();
            do
            {
                sw.WriteLine(line);
                if (line == comp || line == comp2)
                {
                    break;
                }
            } while ((line = sr.ReadLine()) != null);

            for (int i = 0; i < dataGridView1.Rows.Count - 1; i++)
            {
                s_newInstance += (String)dataGridView1.Rows[i].Cells[1].Value + ","; // change this
            }
            s_newInstance += "?";
            sw.WriteLine(s_newInstance);

            sr.Close();
            sw.Close();

            switch (count)
            {
            case 1:
                weka.core.Instances insts = new weka.core.Instances(new java.io.FileReader(newDirectory));
                insts.setClassIndex(insts.numAttributes() - 1);

                weka.filters.Filter myNominalData = new weka.filters.unsupervised.attribute.Discretize();
                myNominalData.setInputFormat(insts);
                insts = weka.filters.Filter.useFilter(insts, myNominalData);

                //randomize the order of the instances in the dataset.
                weka.filters.Filter myRandom = new weka.filters.unsupervised.instance.Randomize();
                myRandom.setInputFormat(insts);
                insts = weka.filters.Filter.useFilter(insts, myRandom);

                double predictedClass = cl_Naive.classifyInstance(insts.instance(0));
                Console.WriteLine("hey", insts.instance(0));
                textBox3.Text = insts.classAttribute().value(Convert.ToInt32(predictedClass));

                break;

            case 2:
                weka.core.Instances insts2 = new weka.core.Instances(new java.io.FileReader(fileDirectory));

                insts2.setClassIndex(insts2.numAttributes() - 1);

                //Nominal to Binary
                weka.filters.Filter myBinaryData = new weka.filters.unsupervised.attribute.NominalToBinary();
                myBinaryData.setInputFormat(insts2);
                insts2 = weka.filters.Filter.useFilter(insts2, myBinaryData);

                //Normalization
                weka.filters.Filter myNormalized = new weka.filters.unsupervised.instance.Normalize();
                myNormalized.setInputFormat(insts2);
                insts2 = weka.filters.Filter.useFilter(insts2, myNormalized);

                //randomize the order of the instances in the dataset.
                weka.filters.Filter myRandom2 = new weka.filters.unsupervised.instance.Randomize();
                myRandom2.setInputFormat(insts2);
                insts2 = weka.filters.Filter.useFilter(insts2, myRandom2);

                double predictedClass2 = cl_Knn.classifyInstance(insts2.instance(0));
                textBox3.Text = insts2.classAttribute().value(Convert.ToInt32(predictedClass2));
                break;

            case 3:
                weka.core.Instances insts3 = new weka.core.Instances(new java.io.FileReader(newDirectory));

                insts3.setClassIndex(insts3.numAttributes() - 1);
                weka.filters.Filter myNormalized2 = new weka.filters.unsupervised.instance.Normalize();
                myNormalized2.setInputFormat(insts3);
                insts3 = weka.filters.Filter.useFilter(insts3, myNormalized2);


                //randomize the order of the instances in the dataset.
                weka.filters.Filter myRandom3 = new weka.filters.unsupervised.instance.Randomize();
                myRandom3.setInputFormat(insts3);
                insts3 = weka.filters.Filter.useFilter(insts3, myRandom3);

                double predictedClass3 = cl_Tree.classifyInstance(insts3.instance(0));
                textBox3.Text = insts3.classAttribute().value(Convert.ToInt32(predictedClass3));
                break;

            case 4:
                weka.core.Instances insts4 = new weka.core.Instances(new java.io.FileReader(newDirectory));
                insts4.setClassIndex(insts4.numAttributes() - 1);
                //cl = new weka.classifiers.functions.MultilayerPerceptron();

                //Nominal to Binary
                weka.filters.Filter myBinaryData2 = new weka.filters.unsupervised.attribute.NominalToBinary();
                myBinaryData2.setInputFormat(insts4);
                insts4 = weka.filters.Filter.useFilter(insts4, myBinaryData2);

                //Normalization
                weka.filters.Filter myNormalized3 = new weka.filters.unsupervised.instance.Normalize();
                myNormalized3.setInputFormat(insts4);
                insts4 = weka.filters.Filter.useFilter(insts4, myNormalized3);

                //randomize the order of the instances in the dataset.
                weka.filters.Filter myRandom4 = new weka.filters.unsupervised.instance.Randomize();
                myRandom4.setInputFormat(insts4);
                insts4 = weka.filters.Filter.useFilter(insts4, myRandom4);

                double predictedClass4 = cl_NN.classifyInstance(insts4.instance(0));
                textBox3.Text = insts4.classAttribute().value(Convert.ToInt32(predictedClass4));

                break;

            case 5:
                weka.core.Instances insts5 = new weka.core.Instances(new java.io.FileReader(newDirectory));

                insts5.setClassIndex(insts5.numAttributes() - 1);


                //Nominal to Binary
                weka.filters.Filter myBinaryData3 = new weka.filters.unsupervised.attribute.NominalToBinary();
                myBinaryData3.setInputFormat(insts5);
                insts5 = weka.filters.Filter.useFilter(insts5, myBinaryData3);

                //Normalization
                weka.filters.Filter myNormalized4 = new weka.filters.unsupervised.instance.Normalize();
                myNormalized4.setInputFormat(insts5);
                insts5 = weka.filters.Filter.useFilter(insts5, myNormalized4);

                //randomize the order of the instances in the dataset.
                weka.filters.Filter myRandom5 = new weka.filters.unsupervised.instance.Randomize();
                myRandom5.setInputFormat(insts5);
                insts5 = weka.filters.Filter.useFilter(insts5, myRandom5);

                double predictedClass5 = cl_SVM.classifyInstance(insts5.instance(0));
                textBox3.Text = insts5.classAttribute().value(Convert.ToInt32(predictedClass5));
                break;

            default:
                textBox3.Text = "Error!";
                break;
            }
        }
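
Each case above re-initialises its filters on the freshly written single-row test file, so Discretize bins and NominalToBinary encodings are recomputed from that one row rather than from the training data. The usual Weka pattern is to call setInputFormat on the training set and reuse the same filter object for the test set; a minimal sketch under that assumption (helper name hypothetical):

        // Sketch: fit a filter once on the training data, then apply the same filter to test data.
        private static void FilterConsistently(weka.core.Instances train, weka.core.Instances test,
                                               weka.filters.Filter filter,
                                               out weka.core.Instances filteredTrain,
                                               out weka.core.Instances filteredTest)
        {
            filter.setInputFormat(train);   // learn bins/encodings from the training data only
            filteredTrain = weka.filters.Filter.useFilter(train, filter);
            filteredTest  = weka.filters.Filter.useFilter(test, filter);  // same filter object => same mapping
        }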