Example #1
        public void TestConcate()
        {
            Matrix<float> mat = new Matrix<float>(30, 40);
             mat.SetRandUniform(new MCvScalar(0), new MCvScalar(255));

             Matrix<float> m1 = mat.GetSubRect(new Rectangle(0, 0, mat.Cols, 20));
             Matrix<float> m2 = mat.GetSubRect(new Rectangle(0, 20, mat.Cols, mat.Rows - 20));
             Matrix<float> mat2 = m1.ConcateVertical(m2);
             Assert.IsTrue(mat.Equals(mat2));

             Matrix<float> m3 = mat.GetSubRect(new Rectangle(0, 0, 10, mat.Rows));
             Matrix<float> m4 = mat.GetSubRect(new Rectangle(10, 0, mat.Cols - 10, mat.Rows));
             Matrix<float> mat3 = m3.ConcateHorizontal(m4);
             Assert.IsTrue(mat.Equals(mat3));

             Matrix<float> m5 = mat.GetRows(0, 5, 1);
             Matrix<float> m6 = mat.GetRows(5, 6, 1);
             Matrix<float> m7 = mat.GetRows(6, mat.Rows, 1);
             Assert.IsTrue(mat.RemoveRows(5, 6).Equals(m5.ConcateVertical(m7)));
             Assert.IsTrue(mat.RemoveRows(0, 1).Equals(mat.GetRows(1, mat.Rows, 1)));
             Assert.IsTrue(mat.RemoveRows(mat.Rows - 1, mat.Rows).Equals(mat.GetRows(0, mat.Rows - 1, 1)));
        }
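
A note on why the assertions above hold: GetSubRect, GetRows and GetCols return views over the parent matrix's data rather than copies (an assumption for GetSubRect specifically, though the SVM/KNN examples below clearly rely on it when they fill trainData through GetRows views). A minimal sketch of that sharing; the 4x4 size and the value 7 are arbitrary:

// Sketch only: a GetSubRect view writes through to its parent matrix.
Matrix<float> parent = new Matrix<float>(4, 4);
parent.SetValue(0);
Matrix<float> view = parent.GetSubRect(new Rectangle(0, 0, 2, 2));
view.SetValue(7);
// parent[0, 0] is now 7, while elements outside the 2x2 sub-rectangle remain 0.
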
Example #2
        public void TestSVM()
        {
            int trainSampleCount = 150;
             int sigma = 60;

             #region Generate the training data and classes

             Matrix<float> trainData = new Matrix<float>(trainSampleCount, 2);
             Matrix<float> trainClasses = new Matrix<float>(trainSampleCount, 1);

             Image<Bgr, Byte> img = new Image<Bgr, byte>(500, 500);

             Matrix<float> sample = new Matrix<float>(1, 2);

             Matrix<float> trainData1 = trainData.GetRows(0, trainSampleCount / 3, 1);
             trainData1.GetCols(0, 1).SetRandNormal(new MCvScalar(100), new MCvScalar(sigma));
             trainData1.GetCols(1, 2).SetRandNormal(new MCvScalar(300), new MCvScalar(sigma));

             Matrix<float> trainData2 = trainData.GetRows(trainSampleCount / 3, 2 * trainSampleCount / 3, 1);
             trainData2.SetRandNormal(new MCvScalar(400), new MCvScalar(sigma));

             Matrix<float> trainData3 = trainData.GetRows(2 * trainSampleCount / 3, trainSampleCount, 1);
             trainData3.GetCols(0, 1).SetRandNormal(new MCvScalar(300), new MCvScalar(sigma));
             trainData3.GetCols(1, 2).SetRandNormal(new MCvScalar(100), new MCvScalar(sigma));

             Matrix<float> trainClasses1 = trainClasses.GetRows(0, trainSampleCount / 3, 1);
             trainClasses1.SetValue(1);
             Matrix<float> trainClasses2 = trainClasses.GetRows(trainSampleCount / 3, 2 * trainSampleCount / 3, 1);
             trainClasses2.SetValue(2);
             Matrix<float> trainClasses3 = trainClasses.GetRows(2 * trainSampleCount / 3, trainSampleCount, 1);
             trainClasses3.SetValue(3);

             #endregion

             using (SVM model = new SVM())
             {
            SVMParams p = new SVMParams();
            p.KernelType = Emgu.CV.ML.MlEnum.SVM_KERNEL_TYPE.LINEAR;
            p.SVMType = Emgu.CV.ML.MlEnum.SVM_TYPE.C_SVC;
            p.C = 1;
            p.TermCrit = new MCvTermCriteria(100, 0.00001);

            //bool trained = model.Train(trainData, trainClasses, null, null, p);
            bool trained = model.TrainAuto(trainData, trainClasses, null, null, p.MCvSVMParams, 5);

            model.Save("svmModel.xml");

            for (int i = 0; i < img.Height; i++)
            {
               for (int j = 0; j < img.Width; j++)
               {
                  sample.Data[0, 0] = j;
                  sample.Data[0, 1] = i;

                  float response = model.Predict(sample);

                  img[i, j] =
                     response == 1 ? new Bgr(90, 0, 0) :
                     response == 2 ? new Bgr(0, 90, 0) :
                     new Bgr(0, 0, 90);
               }
            }

            int c = model.GetSupportVectorCount();
            for (int i = 0; i < c; i++)
            {
               float[] v = model.GetSupportVector(i);
               PointF p1 = new PointF(v[0], v[1]);
               img.Draw(new CircleF(p1, 4), new Bgr(128, 128, 128), 2);
            }
             }

             // display the original training samples
             for (int i = 0; i < (trainSampleCount / 3); i++)
             {
            PointF p1 = new PointF(trainData1[i, 0], trainData1[i, 1]);
            img.Draw(new CircleF(p1, 2.0f), new Bgr(255, 100, 100), -1);
            PointF p2 = new PointF(trainData2[i, 0], trainData2[i, 1]);
            img.Draw(new CircleF(p2, 2.0f), new Bgr(100, 255, 100), -1);
            PointF p3 = new PointF(trainData3[i, 0], trainData3[i, 1]);
            img.Draw(new CircleF(p3, 2.0f), new Bgr(100, 100, 255), -1);
             }
        }
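
The TrainAuto call above cross-validates the SVM hyperparameters over 5 folds, and the trained model is written to svmModel.xml. A minimal, hedged sketch of reusing that file; the Load call mirrors the commented-out model.Load(...) in a later example, so treat the exact signature as an assumption about this Emgu CV 2.x-era wrapper:

// Sketch only: reload the model saved above and classify a single point.
using (SVM loaded = new SVM())
using (Matrix<float> query = new Matrix<float>(1, 2))
{
    loaded.Load("svmModel.xml");                  // restore the trained SVM (assumed API)
    query.Data[0, 0] = 250f;                      // x coordinate of the point to classify
    query.Data[0, 1] = 250f;                      // y coordinate of the point to classify
    float predictedClass = loaded.Predict(query); // expected to be 1, 2 or 3
}
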
Example #3
        public void TestRTreesLetterRecognition()
        {
            Matrix<float> data, response;
             ReadLetterRecognitionData(out data, out response);

             int trainingSampleCount = (int)(data.Rows * 0.8);

             Matrix<Byte> varType = new Matrix<byte>(data.Cols + 1, 1);
             varType.SetValue((byte)MlEnum.VAR_TYPE.NUMERICAL); //the data is numerical
             varType[data.Cols, 0] = (byte) MlEnum.VAR_TYPE.CATEGORICAL; //the response is categorical

             Matrix<byte> sampleIdx = new Matrix<byte>(data.Rows, 1);
             using (Matrix<byte> sampleRows = sampleIdx.GetRows(0, trainingSampleCount, 1))
            sampleRows.SetValue(255);

             MCvRTParams param = new MCvRTParams();
             param.maxDepth = 10;
             param.minSampleCount = 10;
             param.regressionAccuracy = 0.0f;
             param.useSurrogates = false;
             param.maxCategories = 15;
             param.priors = IntPtr.Zero;
             param.calcVarImportance = true;
             param.nactiveVars = 4;
             param.termCrit = new MCvTermCriteria(100, 0.01f);
             param.termCrit.type = Emgu.CV.CvEnum.TERMCRIT.CV_TERMCRIT_ITER;

             using (RTrees forest = new RTrees())
             {
            bool success = forest.Train(
               data,
               Emgu.CV.ML.MlEnum.DATA_LAYOUT_TYPE.ROW_SAMPLE,
               response,
               null,
               sampleIdx,
               varType,
               null,
               param);

            if (!success) return;

            double trainDataCorrectRatio = 0;
            double testDataCorrectRatio = 0;
            for (int i = 0; i < data.Rows; i++)
            {
               using (Matrix<float> sample = data.GetRow(i))
               {
                  double r = forest.Predict(sample, null);
                  r = Math.Abs(r - response[i, 0]);
                  if (r < 1.0e-5)
                  {
                     if (i < trainingSampleCount)
                        trainDataCorrectRatio++;
                     else
                        testDataCorrectRatio++;
                  }
               }
            }

            trainDataCorrectRatio /= trainingSampleCount;
            testDataCorrectRatio /= (data.Rows - trainingSampleCount);

            StringBuilder builder = new StringBuilder("Variable Importance: ");
            using (Matrix<float> varImportance = forest.VarImportance)
            {
               for (int i = 0; i < varImportance.Cols; i++)
               {
                  builder.AppendFormat("{0} ", varImportance[0, i]);
               }
            }

            Trace.WriteLine(String.Format("Prediction accuracy for training data :{0}%", trainDataCorrectRatio*100));
            Trace.WriteLine(String.Format("Prediction accuracy for test data :{0}%", testDataCorrectRatio*100));
            Trace.WriteLine(builder.ToString());
             }
        }
Example #4
        public void TestNormalBayesClassifier()
        {
            Bgr[] colors = new Bgr[] {
            new Bgr(0, 0, 255),
            new Bgr(0, 255, 0),
            new Bgr(255, 0, 0)};
             int trainSampleCount = 150;

             #region Generate the training data and classes
             Matrix<float> trainData = new Matrix<float>(trainSampleCount, 2);
             Matrix<int> trainClasses = new Matrix<int>(trainSampleCount, 1);

             Image<Bgr, Byte> img = new Image<Bgr, byte>(500, 500);

             Matrix<float> sample = new Matrix<float>(1, 2);

             Matrix<float> trainData1 = trainData.GetRows(0, trainSampleCount / 3, 1);
             trainData1.GetCols(0, 1).SetRandNormal(new MCvScalar(100), new MCvScalar(50));
             trainData1.GetCols(1, 2).SetRandNormal(new MCvScalar(300), new MCvScalar(50));

             Matrix<float> trainData2 = trainData.GetRows(trainSampleCount / 3, 2 * trainSampleCount / 3, 1);
             trainData2.SetRandNormal(new MCvScalar(400), new MCvScalar(50));

             Matrix<float> trainData3 = trainData.GetRows(2 * trainSampleCount / 3, trainSampleCount, 1);
             trainData3.GetCols(0, 1).SetRandNormal(new MCvScalar(300), new MCvScalar(50));
             trainData3.GetCols(1, 2).SetRandNormal(new MCvScalar(100), new MCvScalar(50));

             Matrix<int> trainClasses1 = trainClasses.GetRows(0, trainSampleCount / 3, 1);
             trainClasses1.SetValue(1);
             Matrix<int> trainClasses2 = trainClasses.GetRows(trainSampleCount / 3, 2 * trainSampleCount / 3, 1);
             trainClasses2.SetValue(2);
             Matrix<int> trainClasses3 = trainClasses.GetRows(2 * trainSampleCount / 3, trainSampleCount, 1);
             trainClasses3.SetValue(3);
             #endregion

             using (NormalBayesClassifier classifier = new NormalBayesClassifier() )
             {
            classifier.Train(trainData, trainClasses, null, null, false);

            classifier.Save("normalBayes.xml");

            #region Classify every image pixel
            for (int i = 0; i < img.Height; i++)
               for (int j = 0; j < img.Width; j++)
               {
                  sample.Data[0, 0] = i;
                  sample.Data[0, 1] = j;
                  int response = (int) classifier.Predict(sample, null);

                  Bgr color = colors[response -1];

                  img[j, i] = new Bgr(color.Blue * 0.5, color.Green * 0.5, color.Red * 0.5);
               }
            #endregion
             }

             // display the original training samples
             for (int i = 0; i < (trainSampleCount / 3); i++)
             {
            PointF p1 = new PointF(trainData1[i, 0], trainData1[i, 1]);
            img.Draw(new CircleF(p1, 2.0f), colors[0], -1);
            PointF p2 = new PointF(trainData2[i, 0], trainData2[i, 1]);
            img.Draw(new CircleF(p2, 2.0f), colors[1], -1);
            PointF p3 = new PointF(trainData3[i, 0], trainData3[i, 1]);
            img.Draw(new CircleF(p3, 2.0f), colors[2], -1);
             }
        }
Example #5
        public void TestKNearest()
        {
            int K = 10;
             int trainSampleCount = 100;

             #region Generate the training data and classes

             Matrix<float> trainData = new Matrix<float>(trainSampleCount, 2);
             Matrix<float> trainClasses = new Matrix<float>(trainSampleCount, 1);

             Image<Bgr, Byte> img = new Image<Bgr, byte>(500, 500);

             Matrix<float> sample = new Matrix<float>(1, 2);

             Matrix<float> trainData1 = trainData.GetRows(0, trainSampleCount >> 1, 1);
             trainData1.SetRandNormal(new MCvScalar(200), new MCvScalar(50));
             Matrix<float> trainData2 = trainData.GetRows(trainSampleCount >> 1, trainSampleCount, 1);
             trainData2.SetRandNormal(new MCvScalar(300), new MCvScalar(50));

             Matrix<float> trainClasses1 = trainClasses.GetRows(0, trainSampleCount >> 1, 1);
             trainClasses1.SetValue(1);
             Matrix<float> trainClasses2 = trainClasses.GetRows(trainSampleCount >> 1, trainSampleCount, 1);
             trainClasses2.SetValue(2);
             #endregion

             Matrix<float> results, neighborResponses;
             results = new Matrix<float>(sample.Rows, 1);
             neighborResponses = new Matrix<float>(sample.Rows, K);
             //dist = new Matrix<float>(sample.Rows, K);

             using (KNearest knn = new KNearest(trainData, trainClasses, null, false, K))
             {
            //TODO: find out when knn.save will be implemented
            //knn.Save("knn.xml");

            for (int i = 0; i < img.Height; i++)
            {
               for (int j = 0; j < img.Width; j++)
               {
                  sample.Data[0, 0] = j;
                  sample.Data[0, 1] = i;

                  // estimate the response and get the neighbors' labels
                  float response = knn.FindNearest(sample, K, results, null, neighborResponses, null);

                  int accuracy = 0;
                  // compute the number of neighbors representing the majority
                  for (int k = 0; k < K; k++)
                  {
                     if (neighborResponses.Data[0, k] == response)
                        accuracy++;
                  }
                  // highlight the pixel depending on the accuracy (or confidence)
                  img[i, j] =
                     response == 1 ?
                        (accuracy > 5 ? new Bgr(90, 0, 0) : new Bgr(90, 40, 0)) :
                        (accuracy > 5 ? new Bgr(0, 90, 0) : new Bgr(40, 90, 0));
               }
            }
             }

             // display the original training samples
             for (int i = 0; i < (trainSampleCount >> 1); i++)
             {
            PointF p1 = new PointF(trainData1[i, 0], trainData1[i, 1]);
            img.Draw(new CircleF(p1, 2.0f), new Bgr(255, 100, 100), -1);
            PointF p2 = new PointF(trainData2[i, 0], trainData2[i, 1]);
            img.Draw(new CircleF(p2, 2.0f), new Bgr(100, 255, 100), -1);
             }
        }
Example #6
        private void TestEmgu()
        {
            int K = 10;
            //int trainSampleCount = 100;
            int trainSampleCount = this.vectorTable[0].Length-1;
            int trainSampleColumns = this.vectorTable.Length - 2; //subtract two columns for the post id and IsImage
            int scalingRatio = 10;

            #region Generate the training data and classes

            Matrix<float> trainData = new Matrix<float>(trainSampleColumns, trainSampleCount);
            Matrix<float> trainClasses = new Matrix<float>(trainSampleColumns, 1);

            Image<Bgr, Byte> img = new Image<Bgr, byte>(trainSampleCount, trainSampleCount);

            Matrix<float> sample = new Matrix<float>(1, trainSampleCount);

            for (int y = 1; y < this.vectorTable[0].Length - 1; y++) {
                for (int x = 2; x < this.vectorTable.Length - 1; x++) {
                    trainData.Data.SetValue(Int32.Parse(this.vectorTable[x][y])*scalingRatio,x-2,y-1);
                }
            }

            Matrix<float> trainData1 = trainData.GetRows(0, trainSampleColumns >> 1, 1);
            //trainData1.SetRandNormal(new MCvScalar(200), new MCvScalar(50));
            Matrix<float> trainData2 = trainData.GetRows(trainSampleColumns >> 1, trainSampleColumns, 1);
            //trainData2.SetRandNormal(new MCvScalar(300), new MCvScalar(50));

            Matrix<float> trainClasses1 = trainClasses.GetRows(0, trainSampleCount >> 1, 1);
            trainClasses1.SetValue(1);
            Matrix<float> trainClasses2 = trainClasses.GetRows(trainSampleCount >> 1, trainSampleCount, 1);
            trainClasses2.SetValue(2);
            #endregion

            Matrix<float> results, neighborResponses;
            results = new Matrix<float>(sample.Rows, 1);
            neighborResponses = new Matrix<float>(sample.Rows, K);
            //dist = new Matrix<float>(sample.Rows, K);

            KNearest knn = new KNearest(trainData, trainClasses, null, false, K);
            for (int i = 0; i < img.Height; i++) {
                for (int j = 0; j < img.Width; j++) {
                    sample.Data[0, 0] = j;
                    sample.Data[0, 1] = i;

                    //Matrix<float> nearestNeighbors = new Matrix<float>(K* sample.Rows, sample.Cols);
                    // estimate the response and get the neighbors' labels
                    float response = knn.FindNearest(sample, K, results, null, neighborResponses, null);

                    int accuracy = 0;
                    // compute the number of neighbors representing the majority
                    for (int k = 0; k < K; k++) {
                        if (neighborResponses.Data[0, k] == response)
                            accuracy++;
                    }
                    // highlight the pixel depending on the accuracy (or confidence)
                    img[i, j] =
                    response == 1 ?
                        (accuracy > 5 ? new Bgr(90, 0, 0) : new Bgr(90, 60, 0)) :
                        (accuracy > 5 ? new Bgr(0, 90, 0) : new Bgr(60, 90, 0));
                }
            }

            // display the original training samples
            for (int i = 0; i < (trainSampleCount >> 1); i++) {
                PointF p1 = new PointF(trainData1[i, 0], trainData1[i, 1]);
                img.Draw(new CircleF(p1, 2.0f), new Bgr(255, 100, 100), -1);
                PointF p2 = new PointF(trainData2[i, 0], trainData2[i, 1]);
                img.Draw(new CircleF(p2, 2.0f), new Bgr(100, 255, 100), -1);
            }

            //Emgu.CV.UI.ImageViewer.Show(img);
            Emgu.CV.UI.ImageViewer imgviewer = new Emgu.CV.UI.ImageViewer(img);
            imgviewer.Show();
        }
Example #7
      public void TestRTreesLetterRecognition()
      {
         Matrix<float> data, response;
         ReadLetterRecognitionData(out data, out response);

         int trainingSampleCount = (int) (data.Rows * 0.8);

         Matrix<Byte> varType = new Matrix<byte>(data.Cols + 1, 1);
         varType.SetValue((byte) MlEnum.VarType.Numerical); //the data is numerical
         varType[data.Cols, 0] = (byte) MlEnum.VarType.Categorical; //the response is categorical

         Matrix<byte> sampleIdx = new Matrix<byte>(data.Rows, 1);
         using (Matrix<byte> sampleRows = sampleIdx.GetRows(0, trainingSampleCount, 1))
            sampleRows.SetValue(255);

         using (RTrees forest = new RTrees())
         using (TrainData td = new TrainData(data, MlEnum.DataLayoutType.RowSample, response, null, sampleIdx, null, varType))
         {
            forest.MaxDepth = 10;
            forest.MinSampleCount = 10;
            forest.RegressionAccuracy = 0.0f;
            forest.UseSurrogates = false;
            forest.MaxCategories = 15;
            forest.CalculateVarImportance = true;
            forest.ActiveVarCount = 4;
            forest.TermCriteria = new MCvTermCriteria(100, 0.01f);
            bool success = forest.Train(td);

            if (!success)
               return;
            
            double trainDataCorrectRatio = 0;
            double testDataCorrectRatio = 0;
            for (int i = 0; i < data.Rows; i++)
            {
               using (Matrix<float> sample = data.GetRow(i))
               {
                  double r = forest.Predict(sample, null);
                  r = Math.Abs(r - response[i, 0]);
                  if (r < 1.0e-5)
                  {
                     if (i < trainingSampleCount)
                        trainDataCorrectRatio++;
                     else
                        testDataCorrectRatio++;
                  }
               }
            }

            trainDataCorrectRatio /= trainingSampleCount;
            testDataCorrectRatio /= (data.Rows - trainingSampleCount);

            StringBuilder builder = new StringBuilder("Variable Importance: ");
            /*
            using (Matrix<float> varImportance = forest.VarImportance)
            {
               for (int i = 0; i < varImportance.Cols; i++)
               {
                  builder.AppendFormat("{0} ", varImportance[0, i]);
               }
            }*/

            EmguAssert.WriteLine(String.Format("Prediction accuracy for training data :{0}%", trainDataCorrectRatio * 100));
            EmguAssert.WriteLine(String.Format("Prediction accuracy for test data :{0}%", testDataCorrectRatio * 100));
            EmguAssert.WriteLine(builder.ToString());
         }
      }
Example #8
      public void TestNormalBayesClassifier()
      {
         Bgr[] colors = new Bgr[] { 
            new Bgr(0, 0, 255), 
            new Bgr(0, 255, 0),
            new Bgr(255, 0, 0)};
         int trainSampleCount = 150;

         #region Generate the training data and classes
         Matrix<float> trainData = new Matrix<float>(trainSampleCount, 2);
         Matrix<int> trainClasses = new Matrix<int>(trainSampleCount, 1);
         
         Image<Bgr, Byte> img = new Image<Bgr, byte>(500, 500);

         Matrix<float> sample = new Matrix<float>(1, 2);

         Matrix<float> trainData1 = trainData.GetRows(0, trainSampleCount / 3, 1);
         trainData1.GetCols(0, 1).SetRandNormal(new MCvScalar(100), new MCvScalar(50));
         trainData1.GetCols(1, 2).SetRandNormal(new MCvScalar(300), new MCvScalar(50));

         Matrix<float> trainData2 = trainData.GetRows(trainSampleCount / 3, 2 * trainSampleCount / 3, 1);
         trainData2.SetRandNormal(new MCvScalar(400), new MCvScalar(50));

         Matrix<float> trainData3 = trainData.GetRows(2 * trainSampleCount / 3, trainSampleCount, 1);
         trainData3.GetCols(0, 1).SetRandNormal(new MCvScalar(300), new MCvScalar(50));
         trainData3.GetCols(1, 2).SetRandNormal(new MCvScalar(100), new MCvScalar(50));

         Matrix<int> trainClasses1 = trainClasses.GetRows(0, trainSampleCount / 3, 1);
         trainClasses1.SetValue(1);
         Matrix<int> trainClasses2 = trainClasses.GetRows(trainSampleCount / 3, 2 * trainSampleCount / 3, 1);
         trainClasses2.SetValue(2);
         Matrix<int> trainClasses3 = trainClasses.GetRows(2 * trainSampleCount / 3, trainSampleCount, 1);
         trainClasses3.SetValue(3);
         #endregion

         using (TrainData td = new TrainData(trainData, MlEnum.DataLayoutType.RowSample, trainClasses))
         using (NormalBayesClassifier classifier = new NormalBayesClassifier())
         {
            //ParamDef[] defs = classifier.GetParams();
            classifier.Train(trainData, MlEnum.DataLayoutType.RowSample, trainClasses);
            classifier.Clear();
            classifier.Train(td);
#if !NETFX_CORE
            String fileName = Path.Combine(Path.GetTempPath(), "normalBayes.xml");
            classifier.Save(fileName);
            if (File.Exists(fileName))
               File.Delete(fileName);
#endif

            #region Classify every image pixel
            for (int i = 0; i < img.Height; i++)
               for (int j = 0; j < img.Width; j++)
               {
                  sample.Data[0, 0] = i;
                  sample.Data[0, 1] = j;
                  int response = (int) classifier.Predict(sample, null);

                  Bgr color = colors[response - 1];

                  img[j, i] = new Bgr(color.Blue * 0.5, color.Green * 0.5, color.Red * 0.5);
               }
            #endregion 
         }

         // display the original training samples
         for (int i = 0; i < (trainSampleCount / 3); i++)
         {
            PointF p1 = new PointF(trainData1[i, 0], trainData1[i, 1]);
            img.Draw(new CircleF(p1, 2.0f), colors[0], -1);
            PointF p2 = new PointF(trainData2[i, 0], trainData2[i, 1]);
            img.Draw(new CircleF(p2, 2.0f), colors[1], -1);
            PointF p3 = new PointF(trainData3[i, 0], trainData3[i, 1]);
            img.Draw(new CircleF(p3, 2.0f), colors[2], -1);
         }

         //Emgu.CV.UI.ImageViewer.Show(img);
      }
Example #9
        static void Main(string[] args)
        {
            int trainSampleCount = 0;
            Image<Bgr, Byte> img = new Image<Bgr, byte>(500, 500);

            //conversion of CSV to gesture variables
            List<Gesture> gestureListClass1 = csvToGestureList(@"C:\Users\Dan\Desktop\thesis data\testEB-3-20.csv");
            List<Gesture> gestureListClass2 = csvToGestureList(@"C:\Users\Dan\Desktop\thesis data\testSNC-3-20.csv");

            trainSampleCount = (gestureListClass1.Count) + (gestureListClass2.Count); //set the sample count to the number of gestures we have available

            //create relevant matrices based on size of the gestureList
            Matrix<float> sample = new Matrix<float>(1, 2); //a sample has 2 columns because of 2 features
            Matrix<float> trainTestData = new Matrix<float>(trainSampleCount, 2);
            Matrix<float> trainTestClasses = new Matrix<float>(trainSampleCount, 1);

            //GESTURE MATH INCOMING
            foreach (Gesture g in gestureListClass1)
            {
                g.runMetrics();
            }

            foreach (Gesture g in gestureListClass2)
            {
                g.runMetrics();
            }

            #region Generate the training data and classes

            //fill first set of data
            for (int i = 0; i < gestureListClass1.Count; i++)
            {
                double[] gMetrics = (gestureListClass1[i].returnMetrics()).ToArray();

                /*order of values
                 * list[0] - xyRatio
                 * list[1] - totalGestureTime
                 */
                trainTestData[i, 0] = ((float)gMetrics[0])*150;
                trainTestData[i, 1] = ((float)gMetrics[1])/4;
            }

            Matrix<float> trainTestData1 = trainTestData.GetRows(0, gestureListClass1.Count, 1);

            for (int j = 0; j < gestureListClass2.Count; j++)
            {
                double[] gMetrics = (gestureListClass2[j].returnMetrics()).ToArray();

                trainTestData[(j + gestureListClass1.Count), 0] = (float)gMetrics[0] * 150;
                trainTestData[(j + gestureListClass1.Count), 1] = ((float)gMetrics[1])/4;
            }

            Matrix<float> trainTestData2 = trainTestData.GetRows(gestureListClass1.Count, trainSampleCount, 1);

            Matrix<float> trainTestClasses1 = trainTestClasses.GetRows(0, gestureListClass1.Count, 1);
            trainTestClasses1.SetValue(1);
            Matrix<float> trainTestClasses2 = trainTestClasses.GetRows(gestureListClass1.Count, trainSampleCount, 1);
            trainTestClasses2.SetValue(2);
            #endregion

            using (SVM model = new SVM())
            {
                SVMParams p = new SVMParams();
                p.KernelType = Emgu.CV.ML.MlEnum.SVM_KERNEL_TYPE.LINEAR;
                p.SVMType = Emgu.CV.ML.MlEnum.SVM_TYPE.C_SVC;
                //p.Gamma = 0.1;
                p.C = 10;
                p.TermCrit = new MCvTermCriteria(100, 0.00001);

                //bool trained = model.Train(trainTestData, trainTestClasses, null, null, p);
                bool trained = model.TrainAuto(trainTestData, trainTestClasses, null, null, p.MCvSVMParams, 5);

                for (int i = 0; i < img.Height; i++)
                {
                    for (int j = 0; j < img.Width; j++)
                    {
                        sample.Data[0, 0] = j;
                        sample.Data[0, 1] = i;

                        float response = model.Predict(sample);

                        img[i, j] =
                           response == 1 ? new Bgr(90, 0, 0) :
                           new Bgr(0, 0, 90);
                           //response == 2 ? new Bgr(0, 90, 0) :
                    }
                }

                int c = model.GetSupportVectorCount();
                for (int i = 0; i < c; i++)
                {
                    float[] v = model.GetSupportVector(i);
                    PointF p1 = new PointF(v[0], v[1]);
                    img.Draw(new CircleF(p1, 4), new Bgr(255, 255, 128), 2);
                }
            }

            // display the original training samples
            for (int i = 0; i < (trainSampleCount / 2); i++)
            {
                if (i < trainTestData1.Rows)
                {
                    PointF p1 = new PointF((trainTestData1[i, 0]), (trainTestData1[i, 1]));
                    img.Draw(new CircleF(p1, 2.0f), new Bgr(255, 100, 100), -1);
                }

                if (i < trainTestData2.Rows)
                {
                    PointF p2 = new PointF((trainTestData2[i, 0]), (trainTestData2[i, 1]));
                    img.Draw(new CircleF(p2, 2.0f), new Bgr(100, 100, 255), -1);
                }
            }

            Emgu.CV.UI.ImageViewer.Show(img);
        }
Example #10
        private Matrix Hminired(Matrix A)
        {
            //function A=hminired(A)
            //%HMINIRED Initial reduction of cost matrix for the Hungarian method.
            //%
            //%B=assredin(A)
            //%A - the unreduced cost matrix.
            //%B - the reduced cost matrix with linked zeros in each row.

            //% v1.0  96-06-13. Niclas Borlin, [email protected].

            //[m,n]=size(A);
            int m = A.Rows, n = A.Columns;

            //% Subtract column-minimum values from each column.
            //colMin=min(A);
            var colMin = new DenseVector(A.GetColumns().Select(col => col.Min()).ToArray());
            //A=A-colMin(ones(n,1),:);
            for (int i = 0; i < A.Rows; ++i) {
                A.SetRow(i, A.GetRow(i) - colMin);
            }

            //% Subtract row-minimum values from each row.
            //rowMin=min(A')';
            var rowMin = new DenseVector(A.GetRows().Select(row => row.Min()).ToArray());
            //A=A-rowMin(:,ones(1,n));
            for (int j = 0; j < A.Columns; ++j) {
                A.SetColumn(j, A.GetColumn(j) - rowMin);
            }

            //% Get positions of all zeros.
            //[i,j]=find(A==0);
            List<int> ilist = new List<int>();
            List<int> jlist = new List<int>();
            A.EachT((v, i, j) => {
                if (v == 0) {
                    ilist.Add(i);
                    jlist.Add(j);
                }
            });

            //% Extend A to give room for row zero list header column.
            //A(1,n+1)=0;
            Matrix tmp = Zeros(n, n + 1);
            tmp.SetSubMatrix(0, n, 0, n, A);
            //for k=1:n
            for (int k = 0; k < n; ++k) {
                //    % Get all column in this row.
                //    cols=j(k==i)';
                var cols = new List<int>();
                cols.Add(n);
                for (int i = 0; i < ilist.Count; ++i) {
                    if (ilist[i] == k) {
                        cols.Add(jlist[i]);
                    }
                }
                cols.Add(-1);

                //    % Insert pointers in matrix.
                //    A(k,[n+1 cols])=[-cols 0];
                for (int i = 0; i < cols.Count - 1; ++i) {
                    tmp[k, cols[i]] = -(cols[i + 1]) - 1;
                } // TODO: not sure whether this conversion of the MATLAB pointer indexing is correct
                //result[k, cols[cols.Count - 1]] = 0;
                //end
            }
            var result = tmp.Each(v => {
                if (v < 0) return v + 1;
                else if (v == 0) return NoMatch;
                else return v;
            });

            return result;
        }
Example #11
        private Image<Bgr, Byte> svm()
        {
            Stopwatch timer = new Stopwatch();
            timer.Start();
            int trainSampleCount = 150;
            int sigma = 60;

            #region Generate the training data and classes

            Matrix<float> trainData = new Matrix<float>(trainSampleCount, 2);
            Matrix<float> trainClasses = new Matrix<float>(trainSampleCount, 1);

            Image<Bgr, Byte> img = new Image<Bgr, byte>(500, 500);

            Matrix<float> sample = new Matrix<float>(1, 2);

            Matrix<float> trainData1 = trainData.GetRows(0, trainSampleCount / 3, 1);
            trainData1.GetCols(0, 1).SetRandNormal(new MCvScalar(100), new MCvScalar(sigma));
            trainData1.GetCols(1, 2).SetRandNormal(new MCvScalar(300), new MCvScalar(sigma));

            Matrix<float> trainData2 = trainData.GetRows(trainSampleCount / 3, 2 * trainSampleCount / 3, 1);
            trainData2.SetRandNormal(new MCvScalar(400), new MCvScalar(sigma));

            Matrix<float> trainData3 = trainData.GetRows(2 * trainSampleCount / 3, trainSampleCount, 1);
            trainData3.GetCols(0, 1).SetRandNormal(new MCvScalar(300), new MCvScalar(sigma));
            trainData3.GetCols(1, 2).SetRandNormal(new MCvScalar(100), new MCvScalar(sigma));

            Matrix<float> trainClasses1 = trainClasses.GetRows(0, trainSampleCount / 3, 1);
            trainClasses1.SetValue(1);
            Matrix<float> trainClasses2 = trainClasses.GetRows(trainSampleCount / 3, 2 * trainSampleCount / 3, 1);
            trainClasses2.SetValue(2);
            Matrix<float> trainClasses3 = trainClasses.GetRows(2 * trainSampleCount / 3, trainSampleCount, 1);
            trainClasses3.SetValue(3);

            #endregion

            timer.Stop();
            MessageBox.Show("Data generation: " + timer.ElapsedMilliseconds + "ms");
            timer.Reset();
            timer.Start();

            using (SVM model = new SVM()) {
                SVMParams p = new SVMParams();
                p.KernelType = Emgu.CV.ML.MlEnum.SVM_KERNEL_TYPE.LINEAR;
                p.SVMType = Emgu.CV.ML.MlEnum.SVM_TYPE.C_SVC;
                p.C = 1;
                p.TermCrit = new MCvTermCriteria(100, 0.00001);

                //model.Load(@"D:\Play Data\训练数据");
                //bool trained = model.Train(trainData, trainClasses, null, null, p);
                bool trained = model.TrainAuto(trainData, trainClasses, null, null, p.MCvSVMParams, 5);
                timer.Stop();
                MessageBox.Show("Training: " + timer.ElapsedMilliseconds + "ms");
                timer.Reset();
                timer.Start();

                for (int i = 0; i < img.Height; i++) {
                    for (int j = 0; j < img.Width; j++) {
                        sample.Data[0, 0] = j;
                        sample.Data[0, 1] = i;

                        //float response = model.Predict(sample);

                        //img[i, j] =
                        //   response == 1 ? new Bgr(90, 0, 0) :
                        //   response == 2 ? new Bgr(0, 90, 0) :
                        //   new Bgr(0, 0, 90);
                    }
                }
                //model.Save(@"D:\Play Data\训练数据");

                timer.Stop();
                MessageBox.Show("Coloring: " + timer.ElapsedMilliseconds + "ms");
                timer.Reset();
                timer.Start();
                int c = model.GetSupportVectorCount();
                for (int i = 0; i < c; i++) {
                    float[] v = model.GetSupportVector(i);
                    PointF p1 = new PointF(v[0], v[1]);
                    img.Draw(new CircleF(p1, 4), new Bgr(128, 128, 128), 2);
                }
                timer.Stop();
                MessageBox.Show("Drawing circles: " + timer.ElapsedMilliseconds + "ms");
                timer.Reset();
                timer.Start();
            }

            // display the original training samples
            for (int i = 0; i < (trainSampleCount / 3); i++) {
                PointF p1 = new PointF(trainData1[i, 0], trainData1[i, 1]);
                img.Draw(new CircleF(p1, 2.0f), new Bgr(255, 100, 100), -1);
                PointF p2 = new PointF(trainData2[i, 0], trainData2[i, 1]);
                img.Draw(new CircleF(p2, 2.0f), new Bgr(100, 255, 100), -1);
                PointF p3 = new PointF(trainData3[i, 0], trainData3[i, 1]);
                img.Draw(new CircleF(p3, 2.0f), new Bgr(100, 100, 255), -1);
            }
            timer.Stop();
            MessageBox.Show("Plotting points: " + timer.ElapsedMilliseconds + "ms");
            timer.Reset();
            timer.Start();

            return img;
        }
Example #12
        private Image<Bgr, Byte> knn()
        {
            int K = 10;
            int trainSampleCount = 150;
            int sigma = 60;

            #region Generate the training data and classes

            Matrix<float> trainData = new Matrix<float>(trainSampleCount, 2);
            Matrix<float> trainClasses = new Matrix<float>(trainSampleCount, 1);

            Image<Bgr, Byte> img = new Image<Bgr, byte>(500, 500);

            Matrix<float> sample = new Matrix<float>(1, 2);

            Matrix<float> trainData1 = trainData.GetRows(0, trainSampleCount / 3, 1);
            trainData1.GetCols(0, 1).SetRandNormal(new MCvScalar(100), new MCvScalar(sigma));
            trainData1.GetCols(1, 2).SetRandNormal(new MCvScalar(300), new MCvScalar(sigma));

            Matrix<float> trainData2 = trainData.GetRows(trainSampleCount / 3, 2 * trainSampleCount / 3, 1);
            trainData2.SetRandNormal(new MCvScalar(400), new MCvScalar(sigma));

            Matrix<float> trainData3 = trainData.GetRows(2 * trainSampleCount / 3, trainSampleCount, 1);
            trainData3.GetCols(0, 1).SetRandNormal(new MCvScalar(300), new MCvScalar(sigma));
            trainData3.GetCols(1, 2).SetRandNormal(new MCvScalar(100), new MCvScalar(sigma));

            Matrix<float> trainClasses1 = trainClasses.GetRows(0, trainSampleCount / 3, 1);
            trainClasses1.SetValue(1);
            Matrix<float> trainClasses2 = trainClasses.GetRows(trainSampleCount / 3, 2 * trainSampleCount / 3, 1);
            trainClasses2.SetValue(2);
            Matrix<float> trainClasses3 = trainClasses.GetRows(2 * trainSampleCount / 3, trainSampleCount, 1);
            trainClasses3.SetValue(3);

            #endregion

            Matrix<float> results, neighborResponses;
            results = new Matrix<float>(sample.Rows, 1);
            neighborResponses = new Matrix<float>(sample.Rows, K);
            //dist = new Matrix<float>(sample.Rows, K);

            //using (KNearest knn = new KNearest(trainData, trainClasses, null, false, K)) {
            using (KNearest knn = new KNearest()) {
                bool trained = knn.Train(trainData, trainClasses, null, false, K, false);

                for (int i = 0; i < img.Height; i++) {
                    for (int j = 0; j < img.Width; j++) {
                        sample.Data[0, 0] = j;
                        sample.Data[0, 1] = i;

                        //Matrix<float> nearestNeighbors = new Matrix<float>(K* sample.Rows, sample.Cols);
                        // estimate the response and get the neighbors' labels
                        float response = knn.FindNearest(sample, K, results, null, neighborResponses, null);

                        int accuracy = 0;
                        // compute the number of neighbors representing the majority
                        for (int k = 0; k < K; k++) {
                            if (neighborResponses.Data[0, k] == response)
                                accuracy++;
                        }
                        // highlight the pixel depending on the accuracy (or confidence)
                        //img[i, j] =
                        //response == 1 ?
                        //    (accuracy > 5 ? new Bgr(90, 0, 0) : new Bgr(90, 60, 0)) :
                        //    (accuracy > 5 ? new Bgr(0, 90, 0) : new Bgr(60, 90, 0));
                        img[i, j] =
                            response == 1 ? (accuracy > 5 ? new Bgr(90, 0, 0) : new Bgr(90, 30, 30)) :
                           response == 2 ? (accuracy > 5 ? new Bgr(0, 90, 0) : new Bgr(30, 90, 30)) :
                            (accuracy > 5 ? new Bgr(0, 0, 90) : new Bgr(30, 30, 90));
                    }
                }
                knn.Save(@"D:\Play Data\KNN训练数据");
            }

            // display the original training samples

            for (int i = 0; i < (trainSampleCount / 3); i++) {
                PointF p1 = new PointF(trainData1[i, 0], trainData1[i, 1]);
                img.Draw(new CircleF(p1, 2.0f), new Bgr(255, 100, 100), -1);
                PointF p2 = new PointF(trainData2[i, 0], trainData2[i, 1]);
                img.Draw(new CircleF(p2, 2.0f), new Bgr(100, 255, 100), -1);
                PointF p3 = new PointF(trainData3[i, 0], trainData3[i, 1]);
                img.Draw(new CircleF(p3, 2.0f), new Bgr(100, 100, 255), -1);
            }
            return img;
        }
Example #13
        private Image<Bgr, Byte> kmeans()
        {
            int trainSampleCount = 1500;
            int sigma = 60;
            Matrix<float> trainData = new Matrix<float>(trainSampleCount, 2);
            Matrix<float> trainData1 = trainData.GetRows(0, trainSampleCount / 3, 1);
            trainData1.GetCols(0, 1).SetRandNormal(new MCvScalar(100), new MCvScalar(sigma));
            trainData1.GetCols(1, 2).SetRandNormal(new MCvScalar(300), new MCvScalar(sigma));

            Matrix<float> trainData2 = trainData.GetRows(trainSampleCount / 3, 2 * trainSampleCount / 3, 1);
            trainData2.SetRandNormal(new MCvScalar(400), new MCvScalar(sigma));

            Matrix<float> trainData3 = trainData.GetRows(2 * trainSampleCount / 3, trainSampleCount, 1);
            trainData3.GetCols(0, 1).SetRandNormal(new MCvScalar(300), new MCvScalar(sigma));
            trainData3.GetCols(1, 2).SetRandNormal(new MCvScalar(100), new MCvScalar(sigma));

            PointF[] points = new PointF[trainSampleCount];
            for (int i = 0; i < points.Length; ++i) {
                points[i] = new PointF(trainData[i, 0], trainData[i, 1]);
            }
            var km = new KMeans<PointF>(points, 3,
                (a, b) => ((a.X - b.X) * (a.X - b.X) + (a.Y - b.Y) * (a.Y - b.Y)),
                list => new PointF(list.Average(p => p.X), list.Average(p => p.Y))
            );
            int it = 0;
            MyTimer timer = new MyTimer();
            timer.Restart();
            //var cluster = km.Cluster();
            var cluster = km.AnnealCluster(
                (a, b) => new PointF(a.X + b.X, a.Y + b.Y),
                (a, b) => new PointF(a.X - b.X, a.Y - b.Y),
                (p, v) => new PointF((float)(p.X / v), (float)(p.Y / v)),
                out it);
            var time = timer.Stop();
            this.Text = String.Format("n={0}, k={1}, time={2}ms, iter={3}.", trainSampleCount, 3, time, it);

            Image<Bgr, Byte> img = new Image<Bgr, byte>(500, 500);
            for (int y = 0; y < 500; ++y) {
                for (int x = 0; x < 500; ++x) {
                    double d0 = (x - cluster[0].Center.X) * (x - cluster[0].Center.X)
                              + (y - cluster[0].Center.Y) * (y - cluster[0].Center.Y);
                    double d1 = (x - cluster[1].Center.X) * (x - cluster[1].Center.X)
                              + (y - cluster[1].Center.Y) * (y - cluster[1].Center.Y);
                    double d2 = (x - cluster[2].Center.X) * (x - cluster[2].Center.X)
                              + (y - cluster[2].Center.Y) * (y - cluster[2].Center.Y);
                    Bgr color = new Bgr(0, 0, 0);
                    if (d0 < d1 && d0 < d2) {
                        color = new Bgr(20, 0, 0);
                    }
                    if (d1 < d0 && d1 < d2) {
                        color = new Bgr(0, 20, 0);
                    }
                    if (d2 < d0 && d2 < d1) {
                        color = new Bgr(0, 0, 20);
                    }
                    img[y, x] = color;
                }
            }
            Bgr[] colors = new[] { new Bgr(128, 0, 0), new Bgr(0, 128, 0), new Bgr(0, 0, 128) };
            Bgr[] centers = new[] { new Bgr(255, 0, 0), new Bgr(0, 255, 0), new Bgr(0, 0, 255) };
            for (int i = 0; i < 3; ++i) {
                foreach (var p in cluster[i]) {
                    img.Draw(new CircleF(p, 2), colors[i], 1);
                }
                img.Draw(new CircleF(cluster[i].Center, 5), centers[i], 3);
            }
            img.Draw(new CircleF(new PointF(100, 300), sigma), new Bgr(128, 128, 128), 2);
            img.Draw(new CircleF(new PointF(100, 300), 3), new Bgr(128, 128, 128), 2);
            img.Draw(new CircleF(new PointF(300, 100), sigma), new Bgr(128, 128, 128), 2);
            img.Draw(new CircleF(new PointF(300, 100), 3), new Bgr(128, 128, 128), 2);
            img.Draw(new CircleF(new PointF(400, 400), sigma), new Bgr(128, 128, 128), 2);
            img.Draw(new CircleF(new PointF(400, 400), 3), new Bgr(128, 128, 128), 2);

            return img;
        }
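
KMeans<PointF> and MyTimer above are project-specific helpers rather than Emgu CV types: the constructor takes the points, the cluster count, a squared-distance function and a centroid function, while AnnealCluster additionally needs add, subtract and divide operations on the point type. The commented-out km.Cluster() call is the plain, non-annealed path; a hedged sketch of using it, assuming it returns the same indexable cluster collection as AnnealCluster:

// Sketch only: run the plain clustering path and report the three centers.
var plainClusters = km.Cluster();
for (int i = 0; i < 3; ++i) {
    PointF center = plainClusters[i].Center;
    Console.WriteLine("cluster {0}: center = ({1}, {2})", i, center.X, center.Y);
}
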
Example #14
        public void TestDTreesMushroom()
        {
            Matrix<float> data, response;
             ReadMushroomData(out data, out response);

             //Use the first 80% of data as training sample
             int trainingSampleCount = (int)(data.Rows * 0.8);

             Matrix<Byte> varType = new Matrix<byte>(data.Cols + 1, 1);
             varType.SetValue((byte)MlEnum.VAR_TYPE.CATEGORICAL); //the data is categorical

             Matrix<byte> sampleIdx = new Matrix<byte>(data.Rows, 1);
             using (Matrix<byte> sampleRows = sampleIdx.GetRows(0, trainingSampleCount, 1))
            sampleRows.SetValue(255);

             float[] priors = new float[] {1, 0.5f};
             GCHandle priorsHandle = GCHandle.Alloc(priors, GCHandleType.Pinned);

             MCvDTreeParams param = new MCvDTreeParams();
             param.maxDepth = 8;
             param.minSampleCount = 10;
             param.regressionAccuracy = 0;
             param.useSurrogates = true;
             param.maxCategories = 15;
             param.cvFolds = 10;
             param.use1seRule = true;
             param.truncatePrunedTree = true;
             param.priors = priorsHandle.AddrOfPinnedObject();

             using (DTree dtree = new DTree())
             {
            bool success = dtree.Train(
               data,
               Emgu.CV.ML.MlEnum.DATA_LAYOUT_TYPE.ROW_SAMPLE,
               response,
               null,
               sampleIdx,
               varType,
               null,
               param);

            if (!success) return;
            double trainDataCorrectRatio = 0;
            double testDataCorrectRatio = 0;
            for (int i = 0; i < data.Rows; i++)
            {
               using (Matrix<float> sample = data.GetRow(i))
               {
                  double r = dtree.Predict(sample, null, false).value;
                  r = Math.Abs(r - response[i, 0]);
                  if (r < 1.0e-5)
                  {
                     if (i < trainingSampleCount)
                        trainDataCorrectRatio++;
                     else
                        testDataCorrectRatio++;
                  }
               }
            }

            trainDataCorrectRatio /= trainingSampleCount;
            testDataCorrectRatio /= (data.Rows - trainingSampleCount);

            Trace.WriteLine(String.Format("Prediction accuracy for training data :{0}%", trainDataCorrectRatio*100));
            Trace.WriteLine(String.Format("Prediction accuracy for test data :{0}%", testDataCorrectRatio*100));
             }

             priorsHandle.Free();
        }
Example #15
        public void TestEM()
        {
            int N = 4; //number of clusters
             int N1 = (int)Math.Sqrt((double)N);

             Bgr[] colors = new Bgr[] {
            new Bgr(0, 0, 255),
            new Bgr(0, 255, 0),
            new Bgr(0, 255, 255),
            new Bgr(255, 255, 0)};

             int nSamples = 100;

             Matrix<float> samples = new Matrix<float>(nSamples, 2);
             Matrix<Int32> labels = new Matrix<int>(nSamples, 1);
             Image<Bgr, Byte> img = new Image<Bgr,byte>(500, 500);
             Matrix<float> sample = new Matrix<float>(1, 2);

             CvInvoke.cvReshape(samples.Ptr, samples.Ptr, 2, 0);
             for (int i = 0; i < N; i++)
             {
            Matrix<float> rows = samples.GetRows(i * nSamples / N, (i + 1) * nSamples / N, 1);
            double scale = ((i % N1) + 1.0) / (N1 + 1);
            MCvScalar mean = new MCvScalar(scale * img.Width, scale * img.Height);
            MCvScalar sigma = new MCvScalar(30, 30);
            rows.SetRandNormal(mean, sigma);
             }
             CvInvoke.cvReshape(samples.Ptr, samples.Ptr, 1, 0);

             using (EM emModel1 = new EM())
             using (EM emModel2 = new EM())
             {
            EMParams parameters1 = new EMParams();
            parameters1.Nclusters = N;
            parameters1.CovMatType = Emgu.CV.ML.MlEnum.EM_COVARIAN_MATRIX_TYPE.COV_MAT_DIAGONAL;
            parameters1.StartStep = Emgu.CV.ML.MlEnum.EM_INIT_STEP_TYPE.START_AUTO_STEP;
            parameters1.TermCrit = new MCvTermCriteria(10, 0.01);
            emModel1.Train(samples, null, parameters1, labels);

            EMParams parameters2 = new EMParams();
            parameters2.Nclusters = N;
            parameters2.CovMatType = Emgu.CV.ML.MlEnum.EM_COVARIAN_MATRIX_TYPE.COV_MAT_GENERIC;
            parameters2.StartStep = Emgu.CV.ML.MlEnum.EM_INIT_STEP_TYPE.START_E_STEP;
            parameters2.TermCrit = new MCvTermCriteria(100, 1.0e-6);
            parameters2.Means = emModel1.Means;
            parameters2.Covs = emModel1.GetCovariances();
            parameters2.Weights = emModel1.Weights;

            emModel2.Train(samples, null, parameters2, labels);

            //TODO: Find out when saving of EM model will be enabled
            //emModel2.Save("emModel.xml");

            #region Classify every image pixel
            for (int i = 0; i < img.Height; i++)
               for (int j = 0; j < img.Width; j++)
               {
                  sample.Data[0, 0] = i;
                  sample.Data[0, 1] = j;
                  int response = (int) emModel2.Predict(sample, null);

                  Bgr color = colors[response];

                  img[j, i] = new Bgr(color.Blue*0.5, color.Green * 0.5, color.Red * 0.5 );
               }
            #endregion

            #region draw the clustered samples
            for (int i = 0; i < nSamples; i++)
            {
               img.Draw(new CircleF(new PointF(samples.Data[i, 0], samples.Data[i, 1]), 1), colors[labels.Data[i, 0]], 0);
            }
            #endregion
             }
        }
Example #16
      public void TestSVM()
      {
         int trainSampleCount = 150;
         int sigma = 60;

         #region Generate the training data and classes

         Matrix<float> trainData = new Matrix<float>(trainSampleCount, 2);
         Matrix<float> trainClasses = new Matrix<float>(trainSampleCount, 1);

         Image<Bgr, Byte> img = new Image<Bgr, byte>(500, 500);

         Matrix<float> sample = new Matrix<float>(1, 2);

         Matrix<float> trainData1 = trainData.GetRows(0, trainSampleCount / 3, 1);
         trainData1.GetCols(0, 1).SetRandNormal(new MCvScalar(100), new MCvScalar(sigma));
         trainData1.GetCols(1, 2).SetRandNormal(new MCvScalar(300), new MCvScalar(sigma));

         Matrix<float> trainData2 = trainData.GetRows(trainSampleCount / 3, 2 * trainSampleCount / 3, 1);
         trainData2.SetRandNormal(new MCvScalar(400), new MCvScalar(sigma));

         Matrix<float> trainData3 = trainData.GetRows(2 * trainSampleCount / 3, trainSampleCount, 1);
         trainData3.GetCols(0, 1).SetRandNormal(new MCvScalar(300), new MCvScalar(sigma));
         trainData3.GetCols(1, 2).SetRandNormal(new MCvScalar(100), new MCvScalar(sigma));

         Matrix<float> trainClasses1 = trainClasses.GetRows(0, trainSampleCount / 3, 1);
         trainClasses1.SetValue(1);
         Matrix<float> trainClasses2 = trainClasses.GetRows(trainSampleCount / 3, 2 * trainSampleCount / 3, 1);
         trainClasses2.SetValue(2);
         Matrix<float> trainClasses3 = trainClasses.GetRows(2 * trainSampleCount / 3, trainSampleCount, 1);
         trainClasses3.SetValue(3);

         #endregion
         //using (SVM.Params p = new SVM.Params(MlEnum.SvmType.CSvc, MlEnum.SvmKernelType.Linear, 0, 1, 0, 1, 0, 0, null, new MCvTermCriteria(100, 1.0e-6)))
         using (SVM model = new SVM())
         using (Matrix<int> trainClassesInt = trainClasses.Convert<int>())
         using (TrainData td = new TrainData(trainData, MlEnum.DataLayoutType.RowSample, trainClassesInt))
         {
            model.Type = SVM.SvmType.CSvc;
            model.SetKernel(SVM.SvmKernelType.Inter);
            model.Degree = 0;
            model.Gamma = 1;
            model.Coef0 = 0;
            model.C = 1;
            model.Nu = 0;
            model.P = 0;
            model.TermCriteria = new MCvTermCriteria(100, 1.0e-6);
            //bool trained = model.TrainAuto(td, 5);
            model.Train(td);
#if !NETFX_CORE
            String fileName = "svmModel.xml";
            //String fileName = Path.Combine(Path.GetTempPath(), "svmModel.xml");
            model.Save(fileName);

            SVM model2 = new SVM();
            FileStorage fs = new FileStorage(fileName,  FileStorage.Mode.Read);
            model2.Read(fs.GetFirstTopLevelNode());

            if (File.Exists(fileName))
               File.Delete(fileName);
#endif

            for (int i = 0; i < img.Height; i++)
            {
               for (int j = 0; j < img.Width; j++)
               {
                  sample.Data[0, 0] = j;
                  sample.Data[0, 1] = i;

                  float response = model.Predict(sample);

                  img[i, j] =
                     response == 1 ? new Bgr(90, 0, 0) :
                     response == 2 ? new Bgr(0, 90, 0) :
                     new Bgr(0, 0, 90);
               }
            }
            Mat supportVectors = model.GetSupportVectors();
            //TODO: find out how to draw the support vectors
            Image<Gray, float> pts = supportVectors.ToImage<Gray, float>(); 
            PointF[] vectors = new PointF[supportVectors.Rows];
            GCHandle handler = GCHandle.Alloc(vectors, GCHandleType.Pinned);
            using (
               Mat vMat = new Mat(supportVectors.Rows, supportVectors.Cols, DepthType.Cv32F, 1,
                  handler.AddrOfPinnedObject(), supportVectors.Cols*4))
            {
               supportVectors.CopyTo(vMat);
            }
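            // Hedged sketch (assumption): with two input features, each row of supportVectors
            // is an (x, y) point, so after the CopyTo above the 'vectors' array could be drawn
            // just like the GetSupportVector results in the older examples, e.g.:
            //foreach (PointF sv in vectors)
            //   img.Draw(new CircleF(sv, 4), new Bgr(128, 128, 128), 2);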
            handler.Free();

            /*
            int c = model.GetSupportVectorCount();
            for (int i = 0; i < c; i++)
            {
               float[] v = model.GetSupportVector(i);
               PointF p1 = new PointF(v[0], v[1]);
               img.Draw(new CircleF(p1, 4), new Bgr(128, 128, 128), 2);
            }*/
         }

         // display the original training samples
         for (int i = 0; i < (trainSampleCount / 3); i++)
         {
            PointF p1 = new PointF(trainData1[i, 0], trainData1[i, 1]);
            img.Draw(new CircleF(p1, 2.0f), new Bgr(255, 100, 100), -1);
            PointF p2 = new PointF(trainData2[i, 0], trainData2[i, 1]);
            img.Draw(new CircleF(p2, 2.0f), new Bgr(100, 255, 100), -1);
            PointF p3 = new PointF(trainData3[i, 0], trainData3[i, 1]);
            img.Draw(new CircleF(p3, 2.0f), new Bgr(100, 100, 255), -1);
         }

         //Emgu.CV.UI.ImageViewer.Show(img);
      }
Example #17
        public void TestANN_MLP()
        {
            int trainSampleCount = 100;

             #region Generate the training data and classes
             Matrix<float> trainData = new Matrix<float>(trainSampleCount, 2);
             Matrix<float> trainClasses = new Matrix<float>(trainSampleCount, 1);

             Image<Bgr, Byte> img = new Image<Bgr, byte>(500, 500);

             Matrix<float> sample = new Matrix<float>(1, 2);
             Matrix<float> prediction = new Matrix<float>(1, 1);

             Matrix<float> trainData1 = trainData.GetRows(0, trainSampleCount >> 1, 1);
             trainData1.SetRandNormal(new MCvScalar(200), new MCvScalar(50));
             Matrix<float> trainData2 = trainData.GetRows(trainSampleCount >> 1, trainSampleCount, 1);
             trainData2.SetRandNormal(new MCvScalar(300), new MCvScalar(50));

             Matrix<float> trainClasses1 = trainClasses.GetRows(0, trainSampleCount >> 1, 1);
             trainClasses1.SetValue(1);
             Matrix<float> trainClasses2 = trainClasses.GetRows(trainSampleCount >> 1, trainSampleCount, 1);
             trainClasses2.SetValue(2);
             #endregion

             Matrix<int> layerSize = new Matrix<int>(new int[] { 2, 5, 1 });

             MCvANN_MLP_TrainParams parameters = new MCvANN_MLP_TrainParams();
             parameters.term_crit = new MCvTermCriteria(10, 1.0e-8); //stop after 10 iterations or when the change drops below 1e-8
             parameters.train_method = Emgu.CV.ML.MlEnum.ANN_MLP_TRAIN_METHOD.BACKPROP;
             parameters.bp_dw_scale = 0.1; //back-propagation learning rate
             parameters.bp_moment_scale = 0.1; //back-propagation momentum

             using (ANN_MLP network = new ANN_MLP(layerSize, Emgu.CV.ML.MlEnum.ANN_MLP_ACTIVATION_FUNCTION.SIGMOID_SYM, 1.0, 1.0))
             {
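            // train on the generated samples using the back-propagation parameters defined above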
            network.Train(trainData, trainClasses, null, null, parameters, Emgu.CV.ML.MlEnum.ANN_MLP_TRAINING_FLAG.DEFAULT);
            network.Save("ann_mlp_model.xml");

            for (int i = 0; i < img.Height; i++)
            {
               for (int j = 0; j < img.Width; j++)
               {
                  sample.Data[0, 0] = j;
                  sample.Data[0, 1] = i;
                  network.Predict(sample, prediction);

                  // read the network's raw output for this sample
                  float response = prediction.Data[0,0];

                  // colour the pixel according to the predicted class (output thresholded at 1.5)
                  img[i, j] = response < 1.5 ? new Bgr(90, 0, 0) : new Bgr(0, 90, 0);
               }
            }
             }

             // display the original training samples
             for (int i = 0; i < (trainSampleCount >> 1); i++)
             {
            PointF p1 = new PointF(trainData1[i, 0], trainData1[i, 1]);
            img.Draw(new CircleF(p1, 2), new Bgr(255, 100, 100), -1);
            PointF p2 = new PointF(trainData2[i, 0], trainData2[i, 1]);
            img.Draw(new CircleF(p2, 2), new Bgr(100, 255, 100), -1);
             }
        }
Example #18
0
      public void TestKNearest()
      {
         int K = 10;
         int trainSampleCount = 100;

         #region Generate the training data and classes

         Matrix<float> trainData = new Matrix<float>(trainSampleCount, 2);
         Matrix<float> trainClasses = new Matrix<float>(trainSampleCount, 1);

         Image<Bgr, Byte> img = new Image<Bgr, byte>(500, 500);

         Matrix<float> sample = new Matrix<float>(1, 2);

         Matrix<float> trainData1 = trainData.GetRows(0, trainSampleCount >> 1, 1);
         trainData1.SetRandNormal(new MCvScalar(200), new MCvScalar(50));
         Matrix<float> trainData2 = trainData.GetRows(trainSampleCount >> 1, trainSampleCount, 1);
         trainData2.SetRandNormal(new MCvScalar(300), new MCvScalar(50));

         Matrix<float> trainClasses1 = trainClasses.GetRows(0, trainSampleCount >> 1, 1);
         trainClasses1.SetValue(1);
         Matrix<float> trainClasses2 = trainClasses.GetRows(trainSampleCount >> 1, trainSampleCount, 1);
         trainClasses2.SetValue(2);
         #endregion
         
         Matrix<float> results = new Matrix<float>(sample.Rows, 1);
         Matrix<float> neighborResponses = new Matrix<float>(sample.Rows, K);
         //Matrix<float> dist = new Matrix<float>(sample.Rows, K);

         using (KNearest knn = new KNearest())
         {
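            // configure k-NN: classify by majority vote over the K nearest neighbours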
            knn.DefaultK = K;
            knn.IsClassifier = true;
            knn.Train(trainData, MlEnum.DataLayoutType.RowSample, trainClasses);
            //ParamDef[] defs =  knn.GetParams();
            //TODO: find out when knn.save will be implemented
            //knn.Save("knn.xml");

            for (int i = 0; i < img.Height; i++)
            {
               for (int j = 0; j < img.Width; j++)
               {
                  sample.Data[0, 0] = j;
                  sample.Data[0, 1] = i;

                  // estimate the response and collect the K neighbours' labels;
                  // FindNearest (rather than Predict) is used so that 'neighborResponses'
                  // is populated for the accuracy computation below
                  float response = knn.FindNearest(sample, K, results, neighborResponses, null);

                  int accuracy = 0;
                  // compute the number of neighbors representing the majority
                  for (int k = 0; k < K; k++)
                  {
                     if (neighborResponses.Data[0, k] == response)
                        accuracy++;
                  }
                  // highlight the pixel depending on the accuracy (or confidence)
                  img[i, j] = 
                     response == 1 ?
                        (accuracy > 5 ? new Bgr(90, 0, 0) : new Bgr(90, 40, 0)) :
                        (accuracy > 5 ? new Bgr(0, 90, 0) : new Bgr(40, 90, 0));
               }
            }

            String knnModelStr;
            //save stat model to string
            using (FileStorage fs = new FileStorage(".yml", FileStorage.Mode.Write | FileStorage.Mode.Memory))
            {
               knn.Write(fs);

               knnModelStr = fs.ReleaseAndGetString();
            }

            //load stat model from string
            using (FileStorage fs = new FileStorage(knnModelStr, FileStorage.Mode.Read | FileStorage.Mode.Memory))
            {
               using (KNearest knn2 = new KNearest())
               {
                  knn2.Read(fs.GetRoot());
               }
            }
         }

         // display the original training samples
         for (int i = 0; i < (trainSampleCount >> 1); i++)
         {
            PointF p1 = new PointF(trainData1[i, 0], trainData1[i, 1]);
            img.Draw(new CircleF(p1, 2.0f), new Bgr(255, 100, 100), -1);
            PointF p2 = new PointF(trainData2[i, 0], trainData2[i, 1]);
            img.Draw(new CircleF(p2, 2.0f), new Bgr(100, 255, 100), -1);
         }

         //Emgu.CV.UI.ImageViewer.Show(img);
      }
Example #19
0
        public void TestKMeans()
        {
            int clustersCount = 5;
             int sampleCount = 300;
             int imageSize = 500;

             Bgr[] colors = new Bgr[] {
            new Bgr(0,0,255),
            new Bgr(0, 255, 0),
            new Bgr(255, 100, 100),
            new Bgr(255,0,255),
            new Bgr(0, 255, 255)};

             Image<Bgr, Byte> image = new Image<Bgr, byte>(imageSize, imageSize);

             #region generate random samples
             Matrix<float> points = new Matrix<float>(sampleCount, 1, 2);

             Matrix<int> clusters = new Matrix<int>(sampleCount, 1);
             Random r = new Random();
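             // draw each cluster's points from a Gaussian with a random centre and spread inside the image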
             for (int i = 0; i < clustersCount; i++)
             {
            Matrix<float> row = points.GetRows(i * (sampleCount / clustersCount), (i + 1) * (sampleCount / clustersCount), 1);
            row.SetRandNormal(new MCvScalar(r.Next() % imageSize , r.Next() % imageSize), new MCvScalar((r.Next() % imageSize) / 6, (r.Next() % imageSize) / 6));
             }
             CvInvoke.cvAbsDiffS(points, points, new MCvScalar()); //take the absolute value so every point has non-negative coordinates
             CvInvoke.cvRandShuffle(points, IntPtr.Zero, 1.0); //shuffle the samples so clusters are not stored contiguously
             #endregion

             // cluster the points into 'clustersCount' groups; each sample's label is written into 'clusters'
             CvInvoke.cvKMeans2(
            points,
            clustersCount,
            clusters,
            new MCvTermCriteria(10, 1.0),
            2,
            IntPtr.Zero,
            0,
            IntPtr.Zero,
            IntPtr.Zero);

             for (int i = 0; i < sampleCount; i++)
             {
            PointF p = new PointF(points.Data[i, 0], points.Data[i, 1]);
            image.Draw(new CircleF(p, 1.0f), colors[clusters[i, 0]], 1);
             }
        }
Example #20
0
      public void TestANN_MLP()
      {
         int trainSampleCount = 100;

         #region Generate the training data and classes
         Matrix<float> trainData = new Matrix<float>(trainSampleCount, 2);
         Matrix<float> trainClasses = new Matrix<float>(trainSampleCount, 1);

         Image<Bgr, Byte> img = new Image<Bgr, byte>(500, 500);

         Matrix<float> sample = new Matrix<float>(1, 2);
         Matrix<float> prediction = new Matrix<float>(1, 1);

         Matrix<float> trainData1 = trainData.GetRows(0, trainSampleCount >> 1, 1);
         trainData1.SetRandNormal(new MCvScalar(200), new MCvScalar(50));
         Matrix<float> trainData2 = trainData.GetRows(trainSampleCount >> 1, trainSampleCount, 1);
         trainData2.SetRandNormal(new MCvScalar(300), new MCvScalar(50));

         Matrix<float> trainClasses1 = trainClasses.GetRows(0, trainSampleCount >> 1, 1);
         trainClasses1.SetValue(1);
         Matrix<float> trainClasses2 = trainClasses.GetRows(trainSampleCount >> 1, trainSampleCount, 1);
         trainClasses2.SetValue(2);
         #endregion

         using (Matrix<int> layerSize = new Matrix<int>(new int[] { 2, 5, 1 }))
         using (Mat layerSizeMat = layerSize.Mat)
         using (TrainData td = new TrainData(trainData, MlEnum.DataLayoutType.RowSample, trainClasses))
         using (ANN_MLP network = new ANN_MLP())
         { 
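            // configure the network: 2-5-1 layer topology, symmetric sigmoid activation,
            // and back-propagation with learning rate 0.1 and momentum 0.1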
            network.SetLayerSizes(layerSizeMat);
            network.SetActivationFunction(ANN_MLP.AnnMlpActivationFunction.SigmoidSym, 0, 0);
            network.TermCriteria = new MCvTermCriteria(10, 1.0e-8);
            network.SetTrainMethod(ANN_MLP.AnnMlpTrainMethod.Backprop, 0.1, 0.1);
            network.Train(td, (int) Emgu.CV.ML.MlEnum.AnnMlpTrainingFlag.Default);

#if !NETFX_CORE
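            // exercise model persistence: save to a temporary file, then clean it up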
            String fileName = Path.Combine(Path.GetTempPath(), "ann_mlp_model.xml");
            network.Save(fileName);
            if (File.Exists(fileName))
               File.Delete(fileName);
#endif

            for (int i = 0; i < img.Height; i++)
            {
               for (int j = 0; j < img.Width; j++)
               {
                  sample.Data[0, 0] = j;
                  sample.Data[0, 1] = i;
                  network.Predict(sample, prediction);

                  // read the network's raw output for this sample
                  float response = prediction.Data[0, 0];

                  // colour the pixel according to the predicted class (output thresholded at 1.5)
                  img[i, j] = response < 1.5 ? new Bgr(90, 0, 0) : new Bgr(0, 90, 0);
               }
            }
         }

         // display the original training samples
         for (int i = 0; i < (trainSampleCount >> 1); i++)
         {
            PointF p1 = new PointF(trainData1[i, 0], trainData1[i, 1]);
            img.Draw(new CircleF(p1, 2), new Bgr(255, 100, 100), -1);
            PointF p2 = new PointF(trainData2[i, 0], trainData2[i, 1]);
            img.Draw(new CircleF(p2, 2), new Bgr(100, 255, 100), -1);
         }

         //Emgu.CV.UI.ImageViewer.Show(img);
      }
Example #21
0
        static void Main(string[] args)
        {
            int trainSampleCount = 0;
            Image<Bgr, Byte> img = new Image<Bgr, byte>(500, 500);

            //conversion of CSV to gesture variables
            List<Gesture> gestureListClass1 = csvToGestureList(@"C:\Users\faculty\Desktop\testEB-3-20.csv");
            List<Gesture> gestureListClass2 = csvToGestureList(@"C:\Users\faculty\Desktop\testSNC-3-20.csv");

            trainSampleCount = (gestureListClass1.Count) + (gestureListClass2.Count); //set the sample count to the number of gestures we have available

            //create relevant matrices based on size of the gestureList
            Matrix<float> sample = new Matrix<float>(1, 16);
            Matrix<float> trainTestData = new Matrix<float>(trainSampleCount, 16);
            Matrix<float> trainTestClasses = new Matrix<float>(trainSampleCount, 1);

            //compute the per-gesture metrics that will be used as the feature vectors below
            foreach (Gesture g in gestureListClass1)
            {
                g.runMetrics();
            }

            foreach (Gesture g in gestureListClass2)
            {
                g.runMetrics();
            }

            #region Generate the training data and classes

            //fill first set of data
            for (int i = 0; i < gestureListClass1.Count; i++)
            {
                double[] gMetrics = (gestureListClass1[i].returnMetrics()).ToArray();
                /*
                 * //add gestures to list
                        temp.Add(xyRatio); //[0]
                        temp.Add(totalGestureTime); //[1]
                        temp.Add(majorAvg); //[2]
                        temp.Add(minorAvg); //[3]

                        temp.Add(avgXVelo); //[4]
                        temp.Add(avgYVelo); //[5]

                        temp.Add(majorVariance); //[6]
                        temp.Add(minorVariance); //[7]

                        //add substrokes
                        temp.Add(firstVeloMag); //[8]
                        temp.Add(firstVeloDir); //[9]
                        temp.Add(secondVeloMag); //[10]
                        temp.Add(secondVeloDir); //[11]
                        temp.Add(thirdVeloMag); //[12]
                        temp.Add(thirdVeloDir); //[13]
                        temp.Add(fourthVeloMag); //[14]
                        temp.Add(fourthVeloDir); //[15]
                 */
                trainTestData[i, 0] = ((float)gMetrics[0]) * 150; //xy ratio (scaled)
                trainTestData[i, 1] = ((float)gMetrics[1]) / 4; //totalGestureTime (scaled)
                //the remaining 14 metrics are copied through unchanged
                for (int k = 2; k < 16; k++)
                {
                    trainTestData[i, k] = (float)gMetrics[k];
                }
            }

            Matrix<float> trainTestData1 = trainTestData.GetRows(0, gestureListClass1.Count, 1);

            for (int j = 0; j < gestureListClass2.Count; j++)
            {
                double[] gMetrics = (gestureListClass2[j].returnMetrics()).ToArray();

                trainTestData[(j + gestureListClass1.Count), 0] = ((float)gMetrics[0]) * 150; //xy ratio (scaled)
                trainTestData[(j + gestureListClass1.Count), 1] = ((float)gMetrics[1]) / 4; //totalGestureTime (scaled)
                //the remaining 14 metrics are copied through unchanged
                for (int k = 2; k < 16; k++)
                {
                    trainTestData[(j + gestureListClass1.Count), k] = (float)gMetrics[k];
                }
            }

            Matrix<float> trainTestData2 = trainTestData.GetRows(gestureListClass1.Count, trainSampleCount, 1);

            Matrix<float> trainTestClasses1 = trainTestClasses.GetRows(0, gestureListClass1.Count, 1);
            trainTestClasses1.SetValue(1);
            Matrix<float> trainTestClasses2 = trainTestClasses.GetRows(gestureListClass1.Count, trainSampleCount, 1);
            trainTestClasses2.SetValue(2);
            #endregion

            using (SVM model = new SVM())
            {
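                // linear C-SVC; TrainAuto below tunes the parameters using 5-fold cross-validation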
                SVMParams p = new SVMParams();
                p.KernelType = Emgu.CV.ML.MlEnum.SVM_KERNEL_TYPE.LINEAR;
                p.SVMType = Emgu.CV.ML.MlEnum.SVM_TYPE.C_SVC;
                //p.Gamma = 0.1;
                p.C = 10;
                p.TermCrit = new MCvTermCriteria(100, 0.00001);

                //bool trained = model.Train(trainTestData, trainTestClasses, null, null, p);
                bool trained = model.TrainAuto(trainTestData, trainTestClasses, null, null, p.MCvSVMParams, 5);

                for (int i = 0; i < img.Height; i++)
                {
                    for (int j = 0; j < img.Width; j++)
                    {
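                        // only the first two of the 16 features are swept over the image grid;
                        // the remaining entries of 'sample' stay at zero, so this map visualises
                        // the classifier along those two dimensions only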
                        sample.Data[0, 0] = j;
                        sample.Data[0, 1] = i;

                        float response = model.Predict(sample);

                        img[i, j] =
                           response == 1 ? new Bgr(90, 0, 0) :
                           //response == 2 ? new Bgr(0, 90, 0) :
                           new Bgr(0, 0, 90);
                    }
                }

                int c = model.GetSupportVectorCount();
                for (int i = 0; i < c; i++)
                {
                    float[] v = model.GetSupportVector(i);
                    PointF p1 = new PointF(v[0], v[1]);
                    img.Draw(new CircleF(p1, 4), new Bgr(255, 255, 128), 2);
                }

                model.Save(@"C:\Users\faculty\Desktop\svm-function3coord16.xml");
            }

            // display the original training samples
            for (int i = 0; i < (trainSampleCount / 2); i++)
            {
                if (i < trainTestData1.Rows)
                {
                    PointF p1 = new PointF((trainTestData1[i, 0]), (trainTestData1[i, 1]));
                    img.Draw(new CircleF(p1, 2.0f), new Bgr(255, 100, 100), -1);
                }

                if (i < trainTestData2.Rows)
                {
                    PointF p2 = new PointF((trainTestData2[i, 0]), (trainTestData2[i, 1]));
                    img.Draw(new CircleF(p2, 2.0f), new Bgr(100, 100, 255), -1);
                }
            }

            Emgu.CV.UI.ImageViewer.Show(img);
        }