Example #1
        /// <summary>
        /// Stitch images together
        /// </summary>
        /// <param name="images">The list of images to stitch</param>
        /// <returns>A final stitched image</returns>
        public static Mat StitchImages(List<Mat> images)
        {
            //Declare the Mat object that will store the final output
            Mat output = new Mat();

            //Declare a vector to store all images from the list
            VectorOfMat matVector = new VectorOfMat();

            //Push all images in the list into a vector
            foreach (Mat img in images)
            {
                matVector.Push(img);
            }

            //Declare a new stitcher
            Stitcher stitcher = new Stitcher();

            //Declare the type of detector that will be used to detect keypoints
            Brisk detector = new Brisk();

            //Here are some other detectors that you can try
            //ORBDetector detector = new ORBDetector();
            //KAZE detector = new KAZE();
            //AKAZE detector = new AKAZE();

            //Set the stitcher class to use the specified detector declared above
            stitcher.SetFeaturesFinder(detector);

            //Stitch the images together
            stitcher.Stitch(matVector, output);

            //Return the final stitched image
            return(output);
        }
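
A minimal call-site sketch for the helper above; the file names are placeholders, and CvInvoke.Imread/Imwrite are the standard Emgu CV load/save calls.

        //load the frames to stitch ("left.jpg"/"right.jpg" are placeholder paths)
        List<Mat> frames = new List<Mat>
        {
            CvInvoke.Imread("left.jpg", Emgu.CV.CvEnum.ImreadModes.Color),
            CvInvoke.Imread("right.jpg", Emgu.CV.CvEnum.ImreadModes.Color)
        };

        //stitch and save the resulting panorama
        using (Mat panorama = StitchImages(frames))
        {
            CvInvoke.Imwrite("panorama.jpg", panorama);
        }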
Example #2
        public Image <Bgr, byte> pointComp(Image <Bgr, byte> baseImg, Image <Bgr, byte> twistedImg)
        {
            Image <Gray, byte> baseImgGray    = baseImg.Convert <Gray, byte>();
            Image <Gray, byte> twistedImgGray = twistedImg.Convert <Gray, byte>();
            Brisk            descriptor       = new Brisk();
            GFTTDetector     detector         = new GFTTDetector(40, 0.01, 5, 3, true);
            VectorOfKeyPoint GFP1             = new VectorOfKeyPoint();
            UMat             baseDesc         = new UMat();
            UMat             bimg             = twistedImgGray.Mat.GetUMat(AccessType.Read);
            VectorOfKeyPoint GFP2             = new VectorOfKeyPoint();
            UMat             twistedDesc      = new UMat();
            UMat             timg             = baseImgGray.Mat.GetUMat(AccessType.Read);

            detector.DetectRaw(bimg, GFP1);
            descriptor.Compute(bimg, GFP1, baseDesc);
            detector.DetectRaw(timg, GFP2);
            descriptor.Compute(timg, GFP2, twistedDesc);
            BFMatcher matcher = new BFMatcher(DistanceType.L2);
            VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch();

            matcher.Add(baseDesc);
            matcher.KnnMatch(twistedDesc, matches, 2, null);
            Mat mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);

            mask.SetTo(new MCvScalar(255));
            Features2DToolbox.VoteForUniqueness(matches, 0.8, mask);
            //int nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(GFP1, GFP2, matches, mask, 1.5, 20);
            Image <Bgr, byte> res = baseImg.CopyBlank();

            Features2DToolbox.DrawMatches(twistedImg, GFP1, baseImg, GFP2, matches, res, new MCvScalar(255, 0, 0), new MCvScalar(255, 0, 0), mask);
            return(res);
        }
Example #3
        public MKeyPoint[] Maspointer(Image <Bgr, byte> image, int mode)
        {
            switch (mode)
            {
            case 0:
            {
                GFTTDetector detector = new GFTTDetector(40, 0.01, 5, 3, true);
                MKeyPoint[]  GFP1     = detector.Detect(image.Convert <Gray, byte>().Mat);
                return(GFP1);
            }

            case 1:
            {
                Brisk       detector = new Brisk();
                MKeyPoint[] GFP1     = detector.Detect(image.Convert <Gray, byte>().Mat);
                return(GFP1);
            }

            case 2:
            {
                FastFeatureDetector detector = new FastFeatureDetector();
                MKeyPoint[]         GFP1     = detector.Detect(image.Convert <Gray, byte>().Mat);
                return(GFP1);
            }
            }
            return(null);
        }
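
A short usage sketch for the method above, assuming img is an Image<Bgr, byte> loaded elsewhere: pick a detector by mode and mark each keypoint.

        //mode 1 selects the BRISK detector inside Maspointer
        MKeyPoint[] points = Maspointer(img, 1);

        //draw a circle at each keypoint, scaled by the keypoint size
        foreach (MKeyPoint kp in points)
        {
            CvInvoke.Circle(img, System.Drawing.Point.Round(kp.Point),
                            (int)(kp.Size / 2), new MCvScalar(0, 255, 0), 2);
        }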
Example #4
        public void TestBOWKmeansTrainer2()
        {
            Image <Gray, byte> box    = EmguAssert.LoadImage <Gray, byte>("box.png");
            Brisk            detector = new Brisk(30, 3, 1.0f);
            VectorOfKeyPoint kpts     = new VectorOfKeyPoint();
            Mat descriptors           = new Mat();

            detector.DetectAndCompute(box, null, kpts, descriptors, false);
            Mat descriptorsF = new Mat();

            //BRISK descriptors are binary (CV_8U); k-means clustering needs floating-point input
            descriptors.ConvertTo(descriptorsF, CvEnum.DepthType.Cv32F);
            //Matrix<float> descriptorsF = descriptors.Convert<float>();
            BOWKMeansTrainer trainer = new BOWKMeansTrainer(100, new MCvTermCriteria(), 3, CvEnum.KMeansInitType.PPCenters);

            trainer.Add(descriptorsF);
            Mat vocabulary = new Mat();

            trainer.Cluster(vocabulary);

            BFMatcher matcher = new BFMatcher(DistanceType.L2);

            BOWImgDescriptorExtractor extractor = new BOWImgDescriptorExtractor(detector, matcher);
            Mat vocabularyByte = new Mat();

            //convert the float vocabulary back to CV_8U so it matches BRISK's binary descriptor type
            vocabulary.ConvertTo(vocabularyByte, CvEnum.DepthType.Cv8U);
            extractor.SetVocabulary(vocabularyByte);

            Mat descriptors2 = new Mat();

            extractor.Compute(box, kpts, descriptors2);
        }
Example #5
        /// <summary>
        /// Detect BRISK keypoints in the given image and compute their descriptors
        /// </summary>
        /// <param name="image">The image to extract features from</param>
        private static ObjectFeatures DetectFeatures_Brisk(Image <Bgr, byte> image)
        {
            try
            {
                Mat modelImage = image.Mat;

                VectorOfKeyPoint modelKeyPoints = new VectorOfKeyPoint();

                using (UMat uModelImage = modelImage.GetUMat(AccessType.Read))
                {
                    Brisk brisk = new Brisk();

                    UMat descriptors = new UMat();

                    Debug.WriteLine("Detecting features and computing descriptors...");

                    brisk.DetectAndCompute(uModelImage, null, modelKeyPoints, descriptors, false);

                    Debug.WriteLine("Computation finished!");

                    return(new ObjectFeatures(modelKeyPoints, descriptors));
                }
            }
            catch (Exception e)
            {
                Debug.WriteLine("Exception when detecting features" + e.Message);
                throw;
            }
        }
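
ObjectFeatures is not shown in this example; a plausible minimal shape, assuming it simply carries the two results:

        public class ObjectFeatures
        {
            public VectorOfKeyPoint KeyPoints { get; }
            public UMat Descriptors { get; }

            public ObjectFeatures(VectorOfKeyPoint keyPoints, UMat descriptors)
            {
                KeyPoints   = keyPoints;
                Descriptors = descriptors;
            }
        }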
Example #6
        private void button3_Click(object sender, EventArgs e)
        {
            GFTTDetector detector = new GFTTDetector(40, 0.01, 5, 3, true);

            var baseImgGray    = baseImg.Convert <Gray, byte>();
            var twistedImgGray = twistedImg.Convert <Gray, byte>();

            //descriptor extractor for the keypoints
            Brisk descriptor = new Brisk();

            //since the inverse transform is what we need here,
            //the twisted image will serve as the base
            VectorOfKeyPoint GFP1     = new VectorOfKeyPoint();
            UMat             baseDesc = new UMat();
            UMat             bimg     = twistedImgGray.Mat.GetUMat(AccessType.Read);

            VectorOfKeyPoint GFP2        = new VectorOfKeyPoint();
            UMat             twistedDesc = new UMat();
            UMat             timg        = baseImgGray.Mat.GetUMat(AccessType.Read);

            //detect the raw keypoints of the images
            detector.DetectRaw(bimg, GFP1);

            //compute descriptors for the detected keypoints
            descriptor.Compute(bimg, GFP1, baseDesc);
            detector.DetectRaw(timg, GFP2);
            descriptor.Compute(timg, GFP2, twistedDesc);

            //matcher class for comparing sets of keypoint descriptors
            BFMatcher matcher = new BFMatcher(DistanceType.L2);

            //container for the keypoint matches
            VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch();

            //add the descriptors of the base keypoints
            matcher.Add(baseDesc);
            //match them against the descriptors of the twisted image
            matcher.KnnMatch(twistedDesc, matches, 2, null);
            //3rd parameter - the number of nearest neighbours to search among for matches
            //4th parameter - a mask, not needed in this case

            //mask marking matches to discard (outliers and non-unique ones)
            Mat mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);

            mask.SetTo(new MCvScalar(255));
            //keep only the unique matches
            Features2DToolbox.VoteForUniqueness(matches, 0.8, mask);

            Mat homography;

            //compute the homography matrix
            homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(GFP1, GFP2, matches, mask, 2);

            var destImage = new Image <Bgr, byte>(baseImg.Size);

            CvInvoke.WarpPerspective(twistedImg, destImage, homography, destImage.Size);
            twistedImg      = destImage;
            imageBox2.Image = destImage.Resize(640, 480, Inter.Linear);
        }
Example #7
 public CVObjectDetector()
 {
     // Brisk, KAZE, AKAZE, ORBDetector, HarrisLaplaceFeatureDetector, FastDetector
     featureDetector = new Brisk();
     // Brisk, ORBDetector, BriefDescriptorExtractor, Freak
     featureDescriptor = new Brisk();
     featureMatcher    = new BFMatcher(DistanceType.L2);
 }
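
The fields assigned in this constructor are declared elsewhere in the class; a sketch of plausible declarations (in Emgu CV, Brisk derives from Feature2D, and BFMatcher from DescriptorMatcher):

 private Feature2D featureDetector;
 private Feature2D featureDescriptor;
 private DescriptorMatcher featureMatcher;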
Example #8
        public static void FindMatch(Mat modelImage, Mat observedImage, out long matchTime, out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints, VectorOfVectorOfDMatch matches, out Mat mask, out Mat homography)
        {
            int    k = 2;
            double uniquenessThreshold = 0.8;
            double hessianThresh       = 300;

            Stopwatch watch;

            homography = null;

            modelKeyPoints    = new VectorOfKeyPoint();
            observedKeyPoints = new VectorOfKeyPoint();

            {
                //using (UMat uModelImage = modelImage.ToUMat(AccessType.Read))
                //using (UMat uObservedImage = observedImage.ToUMat(AccessType.Read))
                //UMat uModelImage = new UMat();
                //UMat uObservedImage = new UMat();
                //CvInvoke.convert
                {
                    //adapted from the classic SURF sample; BRISK takes the detector role here
                    Brisk brisk = new Brisk();
                    //SURF surfCPU = new SURF(hessianThresh);
                    //extract features from the object image
                    UMat modelDescriptors = new UMat();
                    brisk.DetectAndCompute(modelImage, null, modelKeyPoints, modelDescriptors, false);

                    watch = Stopwatch.StartNew();

                    // extract features from the observed image
                    UMat observedDescriptors = new UMat();
                    brisk.DetectAndCompute(observedImage, null, observedKeyPoints, observedDescriptors, false);
                    BFMatcher matcher = new BFMatcher(DistanceType.L2);
                    matcher.Add(modelDescriptors);

                    matcher.KnnMatch(observedDescriptors, matches, k, null);
                    mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                    mask.SetTo(new MCvScalar(255));
                    Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

                    int nonZeroCount = CvInvoke.CountNonZero(mask);
                    if (nonZeroCount >= 4)
                    {
                        nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints,
                                                                                   matches, mask, 1.5, 20);
                        if (nonZeroCount >= 4)
                        {
                            homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints,
                                                                                                  observedKeyPoints, matches, mask, 2);
                        }
                    }

                    watch.Stop();
                }
            }
            matchTime = watch.ElapsedMilliseconds;
        }
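
A hypothetical call site for FindMatch (the image paths are placeholders); homography stays null when fewer than 4 consistent matches survive the voting steps.

        Mat model = CvInvoke.Imread("model.png", Emgu.CV.CvEnum.ImreadModes.Grayscale);
        Mat scene = CvInvoke.Imread("scene.png", Emgu.CV.CvEnum.ImreadModes.Grayscale);
        using (VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch())
        {
            FindMatch(model, scene, out long matchTime, out VectorOfKeyPoint modelKp,
                      out VectorOfKeyPoint sceneKp, matches, out Mat mask, out Mat homography);
            if (homography != null)
            {
                //enough consistent matches survived: the model was located in the scene
            }
        }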
Example #9
        public Image <Bgr, byte> ReturnCompared(out Image <Bgr, byte> def, out Image <Bgr, byte> twistdef)
        {
            var image      = sourceImage.Copy();
            var twistedImg = additionalImage.Copy();
            //descriptor extractor for the keypoints
            Brisk        descriptor = new Brisk();
            GFTTDetector detector   = new GFTTDetector(40, 0.01, 5, 3, true);
            //since the inverse transform is what we need here,
            //the twisted image will serve as the base
            VectorOfKeyPoint GFP1           = new VectorOfKeyPoint();
            UMat             baseDesc       = new UMat();
            var              twistedImgGray = twistedImg.Convert <Gray, byte>();
            var              baseImgGray    = image.Convert <Gray, byte>();
            UMat             bimg           = twistedImgGray.Mat.GetUMat(AccessType.Read);
            VectorOfKeyPoint GFP2           = new VectorOfKeyPoint();
            UMat             twistedDesc    = new UMat();
            UMat             timg           = baseImgGray.Mat.GetUMat(AccessType.Read);

            //detect the raw keypoints of the images
            detector.DetectRaw(bimg, GFP1);
            //compute descriptors for the detected keypoints
            descriptor.Compute(bimg, GFP1, baseDesc);
            detector.DetectRaw(timg, GFP2);
            descriptor.Compute(timg, GFP2, twistedDesc);


            BFMatcher matcher = new BFMatcher(DistanceType.L2);

            //container for the keypoint matches
            VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch();

            //add the descriptors of the base keypoints
            matcher.Add(baseDesc);
            //match them against the descriptors of the twisted image
            matcher.KnnMatch(twistedDesc, matches, 2, null);


            Mat mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);

            mask.SetTo(new MCvScalar(255));
            //keep only the unique matches
            Mat resM = new Mat(image.Height, image.Width * 2, DepthType.Cv8U, 3);
            var res  = resM.ToImage <Bgr, byte>();

            Features2DToolbox.VoteForUniqueness(matches, 0.8, mask);
            int nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(GFP1, GFP2, matches, mask, 1.5, 20);

            Features2DToolbox.DrawMatches(twistedImg, GFP1, image, GFP2, matches, res, new MCvScalar(255, 0,
                                                                                                     0), new MCvScalar(255, 0, 0), mask);
            def      = image;
            twistdef = twistedImg;
            return(res);
        }
Example #10
        public Image <Bgr, byte> PointHomo(Image <Bgr, byte> image, Image <Bgr, byte> image2)
        {
            Image <Gray, byte> baseImgGray    = image.Convert <Gray, byte>();
            Image <Gray, byte> twistedImgGray = image2.Convert <Gray, byte>();
            Brisk            descriptor       = new Brisk();
            GFTTDetector     detector         = new GFTTDetector(40, 0.01, 5, 3, true);
            VectorOfKeyPoint GFP1             = new VectorOfKeyPoint();
            UMat             baseDesc         = new UMat();
            UMat             bimg             = twistedImgGray.Mat.GetUMat(AccessType.Read);
            VectorOfKeyPoint GFP2             = new VectorOfKeyPoint();
            UMat             twistedDesc      = new UMat();
            UMat             timg             = baseImgGray.Mat.GetUMat(AccessType.Read);

            detector.DetectRaw(bimg, GFP1);
            descriptor.Compute(bimg, GFP1, baseDesc);
            detector.DetectRaw(timg, GFP2);
            descriptor.Compute(timg, GFP2, twistedDesc);
            BFMatcher matcher = new BFMatcher(DistanceType.L2);
            VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch();

            matcher.Add(baseDesc);
            matcher.KnnMatch(twistedDesc, matches, 2, null);
            Mat mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);

            mask.SetTo(new MCvScalar(255));
            Features2DToolbox.VoteForUniqueness(matches, 0.8, mask);
            int nonZeroCount      = Features2DToolbox.VoteForSizeAndOrientation(GFP1, GFP2, matches, mask, 1.5, 20);
            Image <Bgr, byte> res = image.CopyBlank();

            Features2DToolbox.DrawMatches(image2, GFP1, image, GFP2, matches, res, new MCvScalar(255, 0, 0), new MCvScalar(255, 0, 0), mask);

            Mat homography;

            homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(GFP1, GFP2, matches, mask, 2);
            var destImage = new Image <Bgr, byte>(image2.Size);

            CvInvoke.WarpPerspective(image2, destImage, homography, destImage.Size);

            return(destImage);
        }
Example #11
        private void btnBrisk_Click(object sender, EventArgs e)
        {
            var temproot = RootImg.Clone();
            var tempimg1 = WorkingImg.Clone();
            Image <Bgr, byte> colorimg   = tempimg1.Convert <Bgr, byte>();
            Image <Bgr, byte> tempOriImg = temproot.Convert <Bgr, byte>();
            //var f2d = new Brisk((int)nudbri1.Value ,(int)nudbri1.Value , (int)nudbri1.Value );
            var f2d = new Brisk();

            var keypoint = f2d.Detect(WorkingImg);

            foreach (var point in keypoint)
            {
                System.Drawing.Rectangle rect = new Rectangle();
                rect.X      = (int)point.Point.X;
                rect.Y      = (int)point.Point.Y;
                rect.Width  = (int)point.Size;
                rect.Height = (int)point.Size;
                tempOriImg.Draw(rect, new Bgr(60, 200, 10), 2);
            }

            rtxLog.AppendText("btnBrisk_Click" + Environment.NewLine);
            RegistHisroty(tempOriImg);
        }
Example #12
 internal static extern void cv_features2d_BRISK_compute(Brisk extractor, Arr image, KeyPointCollection keypoints, Arr descriptors);
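
An extern declaration like this one (and those in Examples #13 and #17) only compiles when paired with a [DllImport] attribute naming the native binary; a minimal sketch, assuming System.Runtime.InteropServices is imported and "cv_native" stands in for the real library name:

 [DllImport("cv_native", CallingConvention = CallingConvention.Cdecl)]
 internal static extern void cv_features2d_BRISK_compute(Brisk extractor, Arr image, KeyPointCollection keypoints, Arr descriptors);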
Example #13
 internal static extern int cv_features2d_BRISK_descriptorType(Brisk extractor);
Example #14
        public void TestBrisk()
        {
            Brisk detector = new Brisk();

            EmguAssert.IsTrue(TestFeature2DTracker(detector, detector), "Unable to find homography matrix");
        }
Example #15
            public string compareAll()
            {

                using (Mat des_t = new Mat())
                using (Image<Gray, byte> theMaster = new Image<Gray, byte>(master))
                using (Image<Gray, byte> theTest = new Image<Gray, byte>(test))
                using (Image<Gray, byte> mask = new Image<Gray, byte>(theMaster.Size))
                using (Mat des_m = new Mat())
                using (VectorOfKeyPoint kp_m = new VectorOfKeyPoint())
                using (VectorOfKeyPoint kp_t = new VectorOfKeyPoint())
                {
                    //SURF br = new SURF(10000, 8, 4);
                    Brisk br = new Brisk(80, 7, 4f);
                    //Image<Gray, byte> backGround = new Image<Gray, byte>(theMaster.Size);

                    //backGround = new Image<Gray, byte>(bg);

                    System.Diagnostics.Stopwatch timer = System.Diagnostics.Stopwatch.StartNew();
                    //CvInvoke.Subtract(backGround, theMaster, mask);
                    //Image<Gray, byte> theMask = mask.Convert<Gray, byte>();
                    //Image<Gray, byte> thetMask = tmask.Convert<Gray, byte>();
                    //theMask = theMask.ThresholdBinary(new Gray(120), new Gray(255));
                    //CvInvoke.Imshow("Mask", mask.Resize(0.2, Emgu.CV.CvEnum.Inter.Nearest));

                    try
                    {
                        br.DetectAndCompute(theMaster, null, kp_m, des_m, false);
                        br.DetectAndCompute(theTest, null, kp_t, des_t, false);
                        br = null;
                    }
                    catch (CvException ex)
                    {
                        MessageBox.Show(ex.ToString());
                        throw;
                    }
                    finally
                    {
                        if (br != null)
                            br.Dispose();
                    }


                    BFMatcher bf = new BFMatcher(DistanceType.L2);
                    VectorOfVectorOfDMatch raw_matches = new Emgu.CV.Util.VectorOfVectorOfDMatch();

                    bf.Add(des_t);

                    try
                    {
                        bf.KnnMatch(des_m, raw_matches, 2, null);
                        bf = null;
                    }
                    catch (Exception e1)
                    {
                        MessageBox.Show(e1.Message);
                        throw;
                    }

                    MDMatch[][] dmatches_tm = raw_matches.ToArrayOfArray();

                    //Lowe-style ratio test, with an extra lower bound on the distance ratio
                    List<MDMatch> good_matches = new List<MDMatch>();
                    foreach (MDMatch[] m in dmatches_tm)
                    {
                        if (m[0].Distance < 0.8 * m[1].Distance && m[0].Distance > 0.7 * m[1].Distance)
                            good_matches.Add(m[0]);
                    }

                    List<int> qidx_t = new List<int>();
                    List<int> tidx_t = new List<int>();
                    int final_dist_t = 0;
                    int oct_count_t = 0;

                    foreach (MDMatch m in good_matches)
                    {
                        var a = kp_m[m.QueryIdx];
                        var b = kp_t[m.TrainIdx];
                        //if(Convert.ToInt32(a.Octave) == Convert.ToInt32(b.Octave))
                        //if (Math.Abs(a.Size - b.Size) < 1f)
                        {
                            oct_count_t += 1;
                            tidx_t.Add(m.TrainIdx);
                            qidx_t.Add(m.QueryIdx);
                        }
                    }

                    for (int i = 0; i < qidx_t.Count; i++)
                    {
                        int q_base = qidx_t[i];
                        int t_base = tidx_t[i];

                        for (int j = i+1; j < qidx_t.Count; j++)
                        {
                            float q = eucledianDist(kp_m[qidx_t[j]].Point, kp_m[q_base].Point);
                            float t = eucledianDist(kp_t[tidx_t[j]].Point, kp_t[t_base].Point);

                            if (Math.Abs(q - t) <= (0.005f * q))
                                final_dist_t++;
                        }
                    }


                    //Image<Bgr, byte> resultImg = new Image<Bgr, byte>(theMaster.Size);
                    //Features2DToolbox.DrawMatches(theMaster, kp_m, theTest, kp_t, raw_matches, resultImg, new MCvScalar(0), new MCvScalar(155));
                    //CvInvoke.Imshow("Res", resultImg.Resize(0.3, Emgu.CV.CvEnum.Inter.Nearest));

                    float raw_len = raw_matches.Size;
                    float final_dist = final_dist_t;
                    float per = (final_dist / raw_len) * 100;
                    float rat = 2000f * (final_dist_t / (float)oct_count_t) / raw_len;
                    float frac = (final_dist / oct_count_t);
                    float f = (final_dist * oct_count_t) / (raw_len); //result
                    long time_taken = timer.ElapsedMilliseconds;
                    var name = test.Substring(35).Split('.')[0];
                    string result = string.Format("{0},{1},{2},{3},{4},{5},{6},{7}", name, raw_len, oct_count_t, final_dist, per, rat, frac, f);
                    return result;
                }
            }
Example #16
            public void compare()
            {

                using (Mat des_t = new Mat())
                using (Image<Gray, byte> theMaster = new Image<Gray, byte>(master))
                using (Image<Gray, byte> theTest = new Image<Gray, byte>(test))
                using (Image<Gray, byte> mask = new Image<Gray, byte>(theMaster.Size))
                using (Mat des_m = new Mat())
                using (VectorOfKeyPoint kp_m = new VectorOfKeyPoint())
                using (VectorOfKeyPoint kp_t = new VectorOfKeyPoint())
                {
                    //SURF br = new SURF(12500);
                    Brisk br = new Brisk(75, 8, 14.28f);
                    //Image<Gray, byte> backGround = new Image<Gray, byte>(theMaster.Size);

                    //backGround = new Image<Gray, byte>(bg);

                    System.Diagnostics.Stopwatch timer = System.Diagnostics.Stopwatch.StartNew();
                    //CvInvoke.Subtract(backGround, theMaster, mask);
                    //Image<Gray, byte> theMask = mask.Convert<Gray, byte>();
                    //Image<Gray, byte> thetMask = tmask.Convert<Gray, byte>();
                    //theMask = theMask.ThresholdBinary(new Gray(120), new Gray(255));
                    //CvInvoke.Imshow("Mask", mask.Resize(0.2, Emgu.CV.CvEnum.Inter.Nearest));
                    
                    try
                    {
                        br.DetectAndCompute(theMaster, null, kp_m, des_m, false);
                        br.DetectAndCompute(theTest, null, kp_t, des_t, false);
                        br = null;
                    }
                    catch (CvException ex)
                    {
                        MessageBox.Show(ex.ToString());
                        throw;
                    }
                    finally
                    {
                        if (br != null)
                            br.Dispose();
                    }


                    BFMatcher bf = new BFMatcher(DistanceType.L2);
                    VectorOfVectorOfDMatch raw_matches = new Emgu.CV.Util.VectorOfVectorOfDMatch();

                    bf.Add(des_t);

                    try
                    {
                        bf.KnnMatch(des_m, raw_matches, 2, null);
                        bf = null;
                    }
                    catch (Exception e1)
                    {
                        MessageBox.Show(e1.Message);
                        throw;
                    }

                    MDMatch[][] dmatches_tm = raw_matches.ToArrayOfArray();

                    List<MDMatch> good_matches = new List<MDMatch>();
                    foreach (MDMatch[] m in dmatches_tm)
                    {
                        if (m[0].Distance < 0.9 * m[1].Distance && m[0].Distance > 0.7 * m[1].Distance)
                            good_matches.Add(m[0]);
                    }

                    List<int> qidx_t = new List<int>();
                    List<int> tidx_t = new List<int>();
                    int final_dist_t = 0;
                    int oct_count_t = 0;

                    foreach (MDMatch m in good_matches)
                    {
                        var a = kp_m[m.QueryIdx];
                        var b = kp_t[m.TrainIdx];
                        if (Convert.ToInt32(a.Octave) == Convert.ToInt32(b.Octave) &&
                            Math.Abs(a.Size - b.Size) < 1)
                        {
                            oct_count_t += 1;
                            tidx_t.Add(m.TrainIdx);
                            qidx_t.Add(m.QueryIdx);
                        }
                    }
                    
                    for (int i = 0; i < qidx_t.Count; i++)
                    {
                        int q_base = qidx_t[i];
                        int t_base = tidx_t[i];
                        //compensate for the self-comparison counted when j starts at i
                        final_dist_t--;

                        for (int j = i; j < qidx_t.Count; j++)
                        {
                            float q = eucledianDist(kp_m[qidx_t[j]].Point, kp_m[q_base].Point);
                            float t = eucledianDist(kp_t[tidx_t[j]].Point, kp_t[t_base].Point);

                            if (Math.Abs(q - t) <= ( 0.06 * q ))
                                final_dist_t++;
                        }
                    }

                    
                    //Image<Bgr, byte> resultImg = new Image<Bgr, byte>(theMaster.Size);
                    //Features2DToolbox.DrawMatches(theMaster, kp_m, theTest, kp_t, raw_matches, resultImg, new MCvScalar(0), new MCvScalar(155));
                    //CvInvoke.Imshow("Res", resultImg.Resize(0.25, Emgu.CV.CvEnum.Inter.Nearest));
                    
                    float raw_len = dmatches_tm.Length;
                    float final_dist = final_dist_t;
                    float per = (final_dist / raw_len) * 100;
                    
                    long time_taken = timer.ElapsedMilliseconds;
                    MessageBox.Show("Result " + per + "\nTime: " + time_taken + 
                        "\nmKP" + kp_m.Size + "\ntKP" + kp_t.Size + 
                        "\ngood_kp" + good_matches.Count + "\noct_cnt" + oct_count_t + "\ndist "+ final_dist);
                }
            }
Example #17
 internal static extern void cv_features2d_BRISK_detect(Brisk detector, Arr image, KeyPointCollection keypoints, Arr mask);