Beispiel #1
0
        /// <summary>
        /// Detects BRISK features in <paramref name="modelImage"/> and <paramref name="observedImage"/>,
        /// brute-force KNN-matches the observed descriptors against the model descriptors, and votes
        /// the matches down to a homography when enough survive.
        /// </summary>
        /// <param name="modelImage">Image of the object being searched for.</param>
        /// <param name="observedImage">Scene image that may contain the object.</param>
        /// <param name="matchTime">Milliseconds spent on the observed-image detection + matching (model detection is excluded).</param>
        /// <param name="modelKeyPoints">Receives the key points detected in the model image.</param>
        /// <param name="observedKeyPoints">Receives the key points detected in the observed image.</param>
        /// <param name="matches">Caller-owned vector that receives the raw KNN matches.</param>
        /// <param name="mask">Receives a Cv8U column vector: non-zero rows mark matches that survived the uniqueness vote.</param>
        /// <param name="homography">Receives the model-to-observed homography, or null when fewer than 4 matches survive voting.</param>
        public static void FindMatch(Mat modelImage, Mat observedImage, out long matchTime, out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints, VectorOfVectorOfDMatch matches, out Mat mask, out Mat homography)
        {
            int    k = 2;                       // neighbours per KNN query
            double uniquenessThreshold = 0.8;   // Lowe-style ratio for VoteForUniqueness
            // FIX: removed unused local 'hessianThresh' (leftover from the commented-out SURF path).

            homography = null;

            modelKeyPoints    = new VectorOfKeyPoint();
            observedKeyPoints = new VectorOfKeyPoint();

            Stopwatch watch;

            // FIX: detector, matcher and both descriptor UMats are IDisposable and were never
            // disposed; 'using' guarantees release of the native memory on every path.
            // NOTE(review): BRISK produces binary descriptors, for which DistanceType.Hamming is
            // the usual metric; L2 is kept here to preserve the original behavior — confirm intent.
            using (Brisk detector = new Brisk())
            using (UMat modelDescriptors = new UMat())
            using (UMat observedDescriptors = new UMat())
            using (BFMatcher matcher = new BFMatcher(DistanceType.L2))
            {
                // Extract features from the model image (deliberately outside the timed section).
                detector.DetectAndCompute(modelImage, null, modelKeyPoints, modelDescriptors, false);

                watch = Stopwatch.StartNew();

                // Extract features from the observed image and match against the model.
                detector.DetectAndCompute(observedImage, null, observedKeyPoints, observedDescriptors, false);
                matcher.Add(modelDescriptors);
                matcher.KnnMatch(observedDescriptors, matches, k, null);

                // Start with every match enabled, then let the voting passes zero out rows.
                mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                mask.SetTo(new MCvScalar(255));
                Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

                // A homography needs at least 4 point correspondences.
                int nonZeroCount = CvInvoke.CountNonZero(mask);
                if (nonZeroCount >= 4)
                {
                    nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints,
                                                                               matches, mask, 1.5, 20);
                    if (nonZeroCount >= 4)
                    {
                        homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints,
                                                                                              observedKeyPoints, matches, mask, 2);
                    }
                }

                watch.Stop();
            }
            matchTime = watch.ElapsedMilliseconds;
        }
Beispiel #2
0
        /// <summary>
        /// Exercises the bag-of-words pipeline: BRISK features are clustered into a 100-word
        /// vocabulary with k-means, the vocabulary is fed to a BOWImgDescriptorExtractor, and a
        /// BOW descriptor is computed for the same image.
        /// </summary>
        public void TestBOWKmeansTrainer2()
        {
            // FIX: every object below wraps native OpenCV memory (IDisposable) and none was
            // disposed; 'using' blocks release them deterministically.
            using (Image<Gray, byte> box = EmguAssert.LoadImage<Gray, byte>("box.png"))
            using (Brisk detector = new Brisk(30, 3, 1.0f))
            using (VectorOfKeyPoint kpts = new VectorOfKeyPoint())
            using (Mat descriptors = new Mat())
            using (Mat descriptorsF = new Mat())
            using (Mat vocabulary = new Mat())
            using (Mat vocabularyByte = new Mat())
            using (Mat descriptors2 = new Mat())
            {
                detector.DetectAndCompute(box, null, kpts, descriptors, false);

                // BRISK emits 8-bit binary descriptors; k-means clustering needs floats.
                descriptors.ConvertTo(descriptorsF, CvEnum.DepthType.Cv32F);

                using (BOWKMeansTrainer trainer = new BOWKMeansTrainer(100, new MCvTermCriteria(), 3, CvEnum.KMeansInitType.PPCenters))
                {
                    trainer.Add(descriptorsF);
                    trainer.Cluster(vocabulary);
                }

                using (BFMatcher matcher = new BFMatcher(DistanceType.L2))
                using (BOWImgDescriptorExtractor extractor = new BOWImgDescriptorExtractor(detector, matcher))
                {
                    // Convert the float cluster centers back to bytes so the vocabulary's depth
                    // matches the 8-bit BRISK descriptors the extractor will compare against.
                    vocabulary.ConvertTo(vocabularyByte, CvEnum.DepthType.Cv8U);
                    extractor.SetVocabulary(vocabularyByte);

                    extractor.Compute(box, kpts, descriptors2);
                }
            }
        }
Beispiel #3
0
        /// <summary>
        /// Detects BRISK key points in the given image and computes their descriptors.
        /// </summary>
        /// <param name="image">Input BGR image; only its underlying Mat is read.</param>
        /// <returns>An <see cref="ObjectFeatures"/> holding the key points and descriptors.</returns>
        /// <exception cref="Exception">Rethrows any detection failure after logging it.</exception>
        private static ObjectFeatures DetectFeatures_Brisk(Image <Bgr, byte> image)
        {
            try
            {
                Mat modelImage = image.Mat;

                VectorOfKeyPoint modelKeyPoints = new VectorOfKeyPoint();

                using (UMat uModelImage = modelImage.GetUMat(AccessType.Read))
                using (Brisk brisk = new Brisk())  // FIX: detector was never disposed (native memory leak)
                {
                    // 'descriptors' is intentionally NOT disposed here — ownership transfers
                    // to the returned ObjectFeatures.
                    UMat descriptors = new UMat();

                    Debug.WriteLine("Detecting features and computing descriptors...");

                    brisk.DetectAndCompute(uModelImage, null, modelKeyPoints, descriptors, false);

                    Debug.WriteLine("Computation finished!");

                    return(new ObjectFeatures(modelKeyPoints, descriptors));
                }
            }
            catch (Exception e)
            {
                // FIX: separator added so the label and the exception text do not run together.
                Debug.WriteLine("Exception when detecting features: " + e.Message);
                throw;  // preserves the original stack trace
            }
        }
Beispiel #4
0
            /// <summary>
            /// Compares the 'master' and 'test' images (instance fields holding file paths):
            /// detects BRISK features in both, KNN-matches them, filters by Lowe ratio and by
            /// octave/size agreement, scores pairwise geometric consistency, and reports the
            /// result in a message box.
            /// </summary>
            public void compare()
            {
                using (Mat des_t = new Mat())
                using (Image<Gray, byte> theMaster = new Image<Gray, byte>(master))
                using (Image<Gray, byte> theTest = new Image<Gray, byte>(test))
                using (Image<Gray, byte> mask = new Image<Gray, byte>(theMaster.Size))
                using (Mat des_m = new Mat())
                using (VectorOfKeyPoint kp_m = new VectorOfKeyPoint())
                using (VectorOfKeyPoint kp_t = new VectorOfKeyPoint())
                {
                    System.Diagnostics.Stopwatch timer = System.Diagnostics.Stopwatch.StartNew();

                    // FIX: the original assigned br = null inside the try block, so the
                    // finally-based 'if (br != null) br.Dispose()' never ran on the success
                    // path — a guaranteed native-memory leak. 'using' disposes on every path.
                    using (Brisk br = new Brisk(75, 8, 14.28f))
                    {
                        try
                        {
                            br.DetectAndCompute(theMaster, null, kp_m, des_m, false);
                            br.DetectAndCompute(theTest, null, kp_t, des_t, false);
                        }
                        catch (CvException ex)
                        {
                            MessageBox.Show(ex.ToString());
                            throw;  // FIX: 'throw ex;' resets the stack trace
                        }
                    }

                    // Match master descriptors (query) against test descriptors (train).
                    MDMatch[][] dmatches_tm;
                    // FIX: same null-out-defeats-disposal pattern for the matcher; also
                    // dispose the raw match vector once its contents are copied out.
                    using (BFMatcher bf = new BFMatcher(DistanceType.L2))
                    using (VectorOfVectorOfDMatch raw_matches = new Emgu.CV.Util.VectorOfVectorOfDMatch())
                    {
                        bf.Add(des_t);

                        try
                        {
                            bf.KnnMatch(des_m, raw_matches, 2, null);
                        }
                        catch (Exception e1)
                        {
                            MessageBox.Show(e1.Message);
                            throw;
                        }

                        dmatches_tm = raw_matches.ToArrayOfArray();
                    }

                    // Band-pass ratio filter: keep matches whose best distance is close to,
                    // but still below, the second-best distance.
                    List<MDMatch> good_matches = new List<MDMatch>();
                    foreach (MDMatch[] m in dmatches_tm)
                    {
                        if (m[0].Distance < 0.9 * m[1].Distance && m[0].Distance > 0.7 * m[1].Distance)
                            good_matches.Add(m[0]);
                    }

                    List<int> qidx_t = new List<int>();
                    List<int> tidx_t = new List<int>();
                    int final_dist_t = 0;
                    int oct_count_t = 0;

                    foreach (MDMatch m in good_matches)
                    {
                        var a = kp_m[m.QueryIdx];
                        var b = kp_t[m.TrainIdx];
                        // FIX: the original line was not valid C# ("if ((...) if(...))").
                        // Reconstructed as a conjunction — same-octave AND similar-size —
                        // matching the commented-out equivalent filter in compareAll.
                        if (Convert.ToInt32(a.Octave) == Convert.ToInt32(b.Octave)
                            && Math.Abs(a.Size - b.Size) < 1)
                        {
                            oct_count_t += 1;
                            tidx_t.Add(m.TrainIdx);
                            qidx_t.Add(m.QueryIdx);
                        }
                    }

                    // Pairwise geometric consistency: count pairs whose master-side distance
                    // agrees with the test-side distance within 6%.
                    for (int i = 0; i < qidx_t.Count; i++)
                    {
                        int q_base = qidx_t[i];
                        int t_base = tidx_t[i];

                        // The original started j at i and pre-decremented final_dist_t; the
                        // self-pair (j == i) always passes the tolerance, so the two cancel.
                        // Starting at i + 1 is equivalent and matches compareAll's loop.
                        for (int j = i + 1; j < qidx_t.Count; j++)
                        {
                            float q = eucledianDist(kp_m[qidx_t[j]].Point, kp_m[q_base].Point);
                            float t = eucledianDist(kp_t[tidx_t[j]].Point, kp_t[t_base].Point);

                            if (Math.Abs(q - t) <= ( 0.06 * q ))
                                final_dist_t++;
                        }
                    }

                    float raw_len = dmatches_tm.Length;
                    float final_dist = final_dist_t;
                    float per = (final_dist / raw_len) * 100;

                    long time_taken = timer.ElapsedMilliseconds;
                    MessageBox.Show("Result " + per + "\nTime: " + time_taken + 
                        "\nmKP" + kp_m.Size + "\ntKP" + kp_t.Size + 
                        "\ngood_kp" + good_matches.Count + "\noct_cnt" + oct_count_t + "\ndist "+ final_dist);
                }
            }
Beispiel #5
0
            /// <summary>
            /// Batch variant of compare(): detects BRISK features in the 'master' and 'test'
            /// images (instance fields holding file paths), KNN-matches them, applies a Lowe
            /// ratio band filter, scores pairwise geometric consistency, and returns one CSV
            /// line of statistics instead of showing a message box.
            /// </summary>
            /// <returns>Comma-separated line: name, raw match count, filtered count, consistency score, and derived ratios.</returns>
            public string compareAll()
            {
                using (Mat des_t = new Mat())
                using (Image<Gray, byte> theMaster = new Image<Gray, byte>(master))
                using (Image<Gray, byte> theTest = new Image<Gray, byte>(test))
                using (Image<Gray, byte> mask = new Image<Gray, byte>(theMaster.Size))
                using (Mat des_m = new Mat())
                using (VectorOfKeyPoint kp_m = new VectorOfKeyPoint())
                using (VectorOfKeyPoint kp_t = new VectorOfKeyPoint())
                {
                    System.Diagnostics.Stopwatch timer = System.Diagnostics.Stopwatch.StartNew();

                    // FIX: the original set br = null inside the try block, so the finally's
                    // 'if (br != null) br.Dispose()' never ran on success — a guaranteed leak.
                    using (Brisk br = new Brisk(80, 7, 4f))
                    {
                        try
                        {
                            br.DetectAndCompute(theMaster, null, kp_m, des_m, false);
                            br.DetectAndCompute(theTest, null, kp_t, des_t, false);
                        }
                        catch (CvException ex)
                        {
                            MessageBox.Show(ex.ToString());
                            throw;  // FIX: 'throw ex;' resets the stack trace
                        }
                    }

                    // Match master descriptors (query) against test descriptors (train).
                    MDMatch[][] dmatches_tm;
                    int raw_count;
                    using (BFMatcher bf = new BFMatcher(DistanceType.L2))
                    using (VectorOfVectorOfDMatch raw_matches = new Emgu.CV.Util.VectorOfVectorOfDMatch())
                    {
                        bf.Add(des_t);

                        try
                        {
                            bf.KnnMatch(des_m, raw_matches, 2, null);
                        }
                        catch (Exception e1)
                        {
                            MessageBox.Show(e1.Message);
                            throw;
                        }

                        dmatches_tm = raw_matches.ToArrayOfArray();
                        raw_count = raw_matches.Size;
                    }

                    // Band-pass ratio filter (tighter upper bound than compare(): 0.8 vs 0.9).
                    List<MDMatch> good_matches = new List<MDMatch>();
                    foreach (MDMatch[] m in dmatches_tm)
                    {
                        if (m[0].Distance < 0.8 * m[1].Distance && m[0].Distance > 0.7 * m[1].Distance)
                            good_matches.Add(m[0]);
                    }

                    List<int> qidx_t = new List<int>();
                    List<int> tidx_t = new List<int>();
                    int final_dist_t = 0;
                    int oct_count_t = 0;

                    // Unlike compare(), the octave/size filter is deliberately disabled here:
                    // every good match is accepted, so oct_count_t == good_matches.Count.
                    foreach (MDMatch m in good_matches)
                    {
                        oct_count_t += 1;
                        tidx_t.Add(m.TrainIdx);
                        qidx_t.Add(m.QueryIdx);
                    }

                    // Pairwise geometric consistency: count pairs whose master-side distance
                    // agrees with the test-side distance within 0.5%.
                    for (int i = 0; i < qidx_t.Count; i++)
                    {
                        int q_base = qidx_t[i];
                        int t_base = tidx_t[i];

                        for (int j = i+1; j < qidx_t.Count; j++)
                        {
                            float q = eucledianDist(kp_m[qidx_t[j]].Point, kp_m[q_base].Point);
                            float t = eucledianDist(kp_t[tidx_t[j]].Point, kp_t[t_base].Point);

                            if (Math.Abs(q - t) <= (0.005f * q))
                                final_dist_t++;
                        }
                    }

                    float raw_len = raw_count;
                    float final_dist = final_dist_t;
                    float per = (final_dist / raw_len) * 100;
                    float rat = 2000f * (final_dist_t / (float)oct_count_t) / raw_len;
                    float frac = (final_dist / oct_count_t);
                    float f = (final_dist * oct_count_t) / (raw_len); //result
                    long time_taken = timer.ElapsedMilliseconds;
                    var name = test.Substring(35).Split('.')[0];
                    // NOTE(review): the format string skips placeholder {5}, so 'rat' (arg index 5)
                    // is computed but never emitted — the last two columns are frac and f.
                    // Kept byte-identical in case downstream CSV consumers rely on this layout; confirm intent.
                    string result = string.Format("{0},{1},{2},{3},{4},{6},{7}", name, raw_len, oct_count_t, final_dist, per, rat, frac, f);
                    return result;
                }
            }