Example #1
        /// <summary>
        /// Computes the porosity of an image sequence whose file names follow a specific format.
        /// By default, white is the pore phase and black is the solid phase, in which case
        /// the images do not need to be inverted.
        /// example :
        ///         ComputeParameters.ComputePorosity(@".\result\", "{0:D4}.bmp", 10, 21, ref porosity);
        /// </summary>
        /// <param name="folder">Folder name</param>
        /// <param name="pattern">File name format, e.g. "{0:D4}.bmp"</param>
        /// <param name="startIndex">Start index</param>
        /// <param name="endIndex">End index</param>
        /// <param name="porosity">Computed porosity (output)</param>
        /// <param name="needReverse">Whether the images need to be inverted</param>
        /// <returns>True if every image in the range was read successfully</returns>
        public static bool ComputePorosity(string folder, string pattern, int startIndex, int endIndex, ref double porosity, bool needReverse = false)
        {
            bool   res = true;
            string fn  = "";

            porosity = 0.0;
            Matrix <double> pMat = new Matrix <double>(endIndex - startIndex + 1, 1);
            Mat             img;

            for (int i = startIndex; i <= endIndex; i++)
            {
                fn  = string.Format(folder + pattern, i);
                img = CvInvoke.Imread(fn, Emgu.CV.CvEnum.LoadImageType.Grayscale);
                if (img.IsEmpty)
                {
                    res = false;
                    break;
                }
                if (needReverse)
                {
                    CvInvoke.BitwiseNot(img, img);
                }
                pMat[i - startIndex, 0] = CvInvoke.CountNonZero(img) * 1.0 / (img.Cols * img.Rows);
                porosity += pMat[i - startIndex, 0];
            }
            if (res)
            {
                // Average the accumulated sum over the slice count so that porosity is a
                // mean value, as in the folder-based overload in Example #10.
                porosity /= (endIndex - startIndex + 1);
                DataReadWriteHelper.RecordInfo("porosity.txt", PackingSystemSetting.ResultDir, pMat, false);
            }
            return(res);
        }
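A minimal caller sketch (the folder, pattern, and index range are illustrative, mirroring the call in the summary above):

        double porosity = 0.0;
        if (ComputeParameters.ComputePorosity(@".\result\", "{0:D4}.bmp", 10, 21, ref porosity))
        {
            Console.WriteLine("Mean porosity: " + porosity);
        }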
Example #2
        public static void FindMatch(Mat modelImage, Mat observedImage, out long matchTime, out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints, VectorOfVectorOfDMatch matches, out Mat mask, out Mat homography)
        {
            int    k = 2;
            double uniquenessThreshold = 0.80;
            double hessianThresh       = 100;

            Stopwatch watch;

            homography = null;

            modelKeyPoints    = new VectorOfKeyPoint();
            observedKeyPoints = new VectorOfKeyPoint();

            using (UMat uModelImage = modelImage.GetUMat(AccessType.Read))
                using (UMat uObservedImage = observedImage.GetUMat(AccessType.Read))
                {
                    SURF surfCPU = new SURF(hessianThresh); // SURF alternative, kept for reference only
                    SIFT siftCPU = new SIFT();              // SIFT is what is actually used below


                    //extract features from the object image
                    UMat modelDescriptors = new UMat();

                    //surfCPU.DetectAndCompute(uModelImage, null, modelKeyPoints, modelDescriptors, false);
                    siftCPU.DetectAndCompute(uModelImage, null, modelKeyPoints, modelDescriptors, false);

                    watch = Stopwatch.StartNew();

                    // extract features from the observed image
                    UMat observedDescriptors = new UMat();

                    //surfCPU.DetectAndCompute(uObservedImage, null, observedKeyPoints, observedDescriptors, false);
                    siftCPU.DetectAndCompute(uObservedImage, null, observedKeyPoints, observedDescriptors, false);

                    BFMatcher matcher = new BFMatcher(DistanceType.L2);
                    matcher.Add(modelDescriptors);

                    matcher.KnnMatch(observedDescriptors, matches, k, null);
                    mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                    mask.SetTo(new MCvScalar(255));

                    Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);
                    //Features2DToolbox.VoteForUniqueness(matches, 1, mask);

                    int nonZeroCount = CvInvoke.CountNonZero(mask);
                    if (nonZeroCount >= 4)
                    {
                        nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints,
                                                                                   matches, mask, 1.5, 20);
                        if (nonZeroCount >= 4)
                        {
                            homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints,
                                                                                                  observedKeyPoints, matches, mask, 2);
                        }
                    }

                    watch.Stop();
                }
            matchTime = watch.ElapsedMilliseconds;
        }
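A caller sketch for the SIFT-based overload above (file names are illustrative; the enum matches the Emgu version used in Example #1):

        using (Mat model = CvInvoke.Imread("model.png", Emgu.CV.CvEnum.LoadImageType.Grayscale))
        using (Mat scene = CvInvoke.Imread("scene.png", Emgu.CV.CvEnum.LoadImageType.Grayscale))
        using (VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch())
        {
            long matchTime;
            VectorOfKeyPoint modelKp, sceneKp;
            Mat mask, homography;
            FindMatch(model, scene, out matchTime, out modelKp, out sceneKp, matches, out mask, out homography);
            Console.WriteLine(homography != null ? "Match found in " + matchTime + " ms" : "No reliable match");
        }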
        public static void FindMatch(
            Mat modelImage,
            Mat observedImage,
            out VectorOfKeyPoint modelKeyPoints,
            out VectorOfKeyPoint observedKeyPoints,
            VectorOfVectorOfDMatch matches,
            out Mat mask,
            out Mat homography)
        {
            int    k = 9;
            double uniquenessThreshold = 0.80;

            Stopwatch watch;

            homography = null;

            modelKeyPoints    = new VectorOfKeyPoint();
            observedKeyPoints = new VectorOfKeyPoint();

            using (UMat uModelImage = modelImage.GetUMat(AccessType.Read))
                using (UMat uObservedImage = observedImage.GetUMat(AccessType.Read))
                {
                    KAZE featureDetector = new KAZE();

                    //extract features from the object image
                    Mat modelDescriptors = new Mat();
                    featureDetector.DetectAndCompute(uModelImage, null, modelKeyPoints, modelDescriptors, false);

                    watch = Stopwatch.StartNew();

                    // extract features from the observed image
                    Mat observedDescriptors = new Mat();
                    featureDetector.DetectAndCompute(uObservedImage, null, observedKeyPoints, observedDescriptors, false);

                    using (Emgu.CV.Flann.LinearIndexParams ip = new Emgu.CV.Flann.LinearIndexParams())
                        using (Emgu.CV.Flann.SearchParams sp = new SearchParams())
                            using (DescriptorMatcher matcher = new FlannBasedMatcher(ip, sp))
                            {
                                matcher.Add(modelDescriptors);

                                matcher.KnnMatch(observedDescriptors, matches, k, null);
                                mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                                mask.SetTo(new MCvScalar(255));
                                Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

                                int nonZeroCount = CvInvoke.CountNonZero(mask);
                                if (nonZeroCount >= 4)
                                {
                                    nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints,
                                                                                               matches, mask, 1.5, 20);
                                    if (nonZeroCount >= 4)
                                    {
                                        homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints,
                                                                                                              observedKeyPoints, matches, mask, 2);
                                    }
                                }
                            }
                    watch.Stop();
                }
        }
        private static void ComputeHomographyAndValidate(ref VectorOfKeyPoint trackedFeatures, ref VectorOfKeyPoint bootstrapKp, out Matrix <double> homographyMatrix)
        {
            //verify features with a homography
            var inlierMask = new VectorOfByte();
            var homography = new Mat();

            if (trackedFeatures.Size > 4)
            {
                CvInvoke.FindHomography(Utils.GetPointsVector(trackedFeatures), Utils.GetPointsVector(bootstrapKp), homography, HomographyMethod.Ransac, RansacThreshold,
                                        inlierMask);
            }

            homographyMatrix = new Matrix <double>(homography.Rows, homography.Cols, homography.DataPointer);

            int inliersNum = CvInvoke.CountNonZero(inlierMask);

            if (inliersNum != trackedFeatures.Size && inliersNum >= 4 && !homography.IsEmpty)
            {
                Utils.KeepVectorsByStatus(ref trackedFeatures, ref bootstrapKp, inlierMask);
            }
            else if (inliersNum < 10)
            {
                throw new Exception("Not enough features survived homography.");
            }
        }
Example #5
        public static Bitmap Skelatanize(Bitmap image)
        {
            Image <Gray, byte> imgOld = new Image <Gray, byte>(image);
            Image <Gray, byte> img2   = (new Image <Gray, byte>(imgOld.Width, imgOld.Height, new Gray(255))).Sub(imgOld);
            Image <Gray, byte> eroded = new Image <Gray, byte>(img2.Size);
            Image <Gray, byte> temp   = new Image <Gray, byte>(img2.Size);
            Image <Gray, byte> skel   = new Image <Gray, byte>(img2.Size);

            skel.SetValue(0);
            CvInvoke.Threshold(img2, img2, 127, 255, 0);
            var  element = CvInvoke.GetStructuringElement(ElementShape.Cross, new Size(3, 3), new Point(-1, -1));
            bool done    = false;

            while (!done)
            {
                CvInvoke.Erode(img2, eroded, element, new Point(-1, -1), 1, BorderType.Reflect, default(MCvScalar));
                CvInvoke.Dilate(eroded, temp, element, new Point(-1, -1), 1, BorderType.Reflect, default(MCvScalar));
                CvInvoke.Subtract(img2, temp, temp);
                CvInvoke.BitwiseOr(skel, temp, skel);
                eroded.CopyTo(img2);
                if (CvInvoke.CountNonZero(img2) == 0)
                {
                    done = true;
                }
            }
            return(skel.Bitmap);
        }
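The loop above is the classical morphological skeleton: open the image (erode, then dilate), subtract the opening from the current image, OR the residue into the skeleton, and repeat until the image is fully eroded. A caller sketch (the file name is illustrative):

        Bitmap input    = new Bitmap("binary_input.bmp");
        Bitmap skeleton = Skelatanize(input);
        skeleton.Save("skeleton.bmp");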
Example #6
        private static int[] GetAdjust(Image <Gray, byte> ImgCap, Image <Gray, byte> ImgGerber, int RangeX, int RangeY)
        {
            int    x_ok = 0;
            int    y_ok = 0;
            double diff = 2448 * 2018; // initial upper bound (roughly the full image area)

            for (int x = -RangeX; x < RangeX; x++)
            {
                for (int y = -RangeY; y < RangeY; y++)
                {
                    using (Image <Gray, byte> imgTransform = ImageProcessingUtils.ImageTransformation(ImgCap.Copy(), x, y))
                    {
                        CvInvoke.AbsDiff(imgTransform, ImgGerber, imgTransform);

                        int count = CvInvoke.CountNonZero(imgTransform);
                        if (count < diff)
                        {
                            diff = count;
                            x_ok = x;
                            y_ok = y;
                        }
                    }
                }
            }
            return(new int[] { x_ok, y_ok });
        }
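GetAdjust is brute-force registration: it tries every (x, y) shift in the given ranges and keeps the one that leaves the fewest differing pixels against the Gerber reference. A caller sketch (variable names are illustrative):

        // Search the best alignment within +/-20 px in each direction.
        int[] shift = GetAdjust(capturedImage, gerberImage, 20, 20);
        Console.WriteLine("Best offset: x=" + shift[0] + ", y=" + shift[1]);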
Example #7
        private static void GetCenterPointForDatum(clsPoint pt, double[,] model, ARParam arParams, int[] vp, Image <Gray, byte> grayImage, ref Emgu.CV.Util.VectorOfPointF centerPoints)
        {
            var cpt        = ModelToImageSpace(arParams, model, pt);
            var halfSquare = GetSquareForDatum(arParams, model, pt);

            if (halfSquare < 8)
            {
                return;
            }
            if (cpt.x - halfSquare < 0 || cpt.x + halfSquare > vp[2] || cpt.y - halfSquare < 0 || cpt.y + halfSquare > vp[3])
            {
                return;
            }

            var    rect          = new Rectangle((int)cpt.x - halfSquare, (int)cpt.y - halfSquare, 2 * halfSquare, 2 * halfSquare);
            var    region        = new Mat(grayImage.Mat, rect);
            var    binaryRegion  = region.Clone();
            double otsuThreshold = CvInvoke.Threshold(region, binaryRegion, 0.0, 255.0, Emgu.CV.CvEnum.ThresholdType.Otsu);
            int    nonzero       = CvInvoke.CountNonZero(binaryRegion);
            var    square        = 4 * halfSquare * halfSquare;

            if (nonzero > square * 0.2f && nonzero < square * 0.8f)
            {
                centerPoints.Push(new PointF[] { new PointF((float)cpt.X, (float)cpt.Y) });
            }
        }
Example #8
        private void FindMatch(Mat observedImage)
        {
            int k = 2;

            mask           = new Mat();
            homography     = null;
            matches        = new VectorOfVectorOfDMatch();
            uObservedImage = observedImage.GetUMat(AccessType.ReadWrite);

            // extract features from the observed image
            ORBCPU.DetectAndCompute(uObservedImage, null, observedKeyPoints, observedDescriptors, false);
            matcher = new BFMatcher(DistanceType.L2);
            matcher.Add(objDescriptors);
            if (objDescriptors.Size.Height > 3 && observedDescriptors.Size.Height > 3)
            {
                matcher.KnnMatch(observedDescriptors, matches, k, null);
                mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                mask.SetTo(new MCvScalar(255));
                Features2DToolbox.VoteForUniqueness(matches, 0.8, mask);

                int nonZeroCount = CvInvoke.CountNonZero(mask);
                if (nonZeroCount >= 4)
                {
                    //nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(objKeyPoints, observedKeyPoints,
                    //matches, mask, 1, 2);
                    if (nonZeroCount >= 4)
                    {
                        homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(objKeyPoints,
                                                                                              observedKeyPoints, matches, mask, 3);
                    }
                }
            }
        }
Example #9
        public static void GetMatches(VectorOfKeyPoint imageKeypoints, IInputArray imageDescriptors, VectorOfKeyPoint patternKeypoints, IInputArray patternDescriptors, out VectorOfVectorOfDMatch matches, out Mat homography)
        {
            int    k = 2;
            double uniquenessThreshold = 0.8;

            homography = null;

            matches = new VectorOfVectorOfDMatch();

            var matcher = new BFMatcher(DistanceType.L2);

            matcher.Add(patternDescriptors);
            matcher.KnnMatch(imageDescriptors, matches, k, null);

            var mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);

            mask.SetTo(new MCvScalar(255));
            Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

            int nonZeroCount = CvInvoke.CountNonZero(mask);

            if (nonZeroCount >= 4)
            {
                nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(patternKeypoints, imageKeypoints, matches, mask, 1.5, 20);
                if (nonZeroCount >= 4)
                {
                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(patternKeypoints, imageKeypoints, matches, mask, 2);
                }
            }
        }
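A caller sketch, assuming KAZE features as in the neighboring examples (any detector producing float descriptors suitable for an L2 BFMatcher would work; `image` and `pattern` are illustrative input Mats):

        KAZE detector = new KAZE();
        VectorOfKeyPoint imageKp = new VectorOfKeyPoint(), patternKp = new VectorOfKeyPoint();
        Mat imageDesc = new Mat(), patternDesc = new Mat();
        detector.DetectAndCompute(image, null, imageKp, imageDesc, false);
        detector.DetectAndCompute(pattern, null, patternKp, patternDesc, false);
        VectorOfVectorOfDMatch matches;
        Mat homography;
        GetMatches(imageKp, imageDesc, patternKp, patternDesc, out matches, out homography);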
Example #10
        /// <summary>
        /// Computes the porosity of the image sequence formed by all .bmp files in a folder.
        /// By default, white is the pore phase and black is the solid phase, in which case
        /// the images do not need to be inverted.
        /// example :
        ///         ComputeParameters.ComputePorosity(dlg.SelectedPath, ref porosity, true)
        /// </summary>
        /// <param name="folder">Folder name</param>
        /// <param name="porosity">Computed porosity (output)</param>
        /// <param name="needReverse">Whether the images need to be inverted</param>
        /// <returns>True if the folder exists</returns>
        public static bool ComputePorosity(string folder, ref double porosity, bool needReverse = false)
        {
            bool res = true;

            porosity = 0.0;
            if (Directory.Exists(folder))
            {
                var             files = Directory.GetFiles(folder, "*.bmp");
                Matrix <double> pMat  = new Matrix <double>(files.Count(), 1);
                Mat             img;
                int             index = 0;
                foreach (var file in files)
                {
                    img = CvInvoke.Imread(file, Emgu.CV.CvEnum.LoadImageType.Grayscale);
                    if (needReverse)
                    {
                        CvInvoke.BitwiseNot(img, img);
                    }
                    pMat[index++, 0] = CvInvoke.CountNonZero(img) * 1.0 / (img.Cols * img.Rows);
                    porosity        += pMat[index - 1, 0];
                }
                porosity /= files.Count();
                DataReadWriteHelper.RecordInfo("porosity.txt", PackingSystemSetting.ResultDir, pMat, false);
            }
            else
            {
                res = false;
            }

            return(res);
        }
Example #11
        public void TestConvolutionAndLaplace()
        {
            Mat image = new Mat(new Size(300, 400), DepthType.Cv8U, 1);

            CvInvoke.Randu(image, new MCvScalar(0.0), new MCvScalar(255.0));
            Mat laplacian = new Mat();

            CvInvoke.Laplacian(image, laplacian, DepthType.Cv8U);
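            // With its default 3x3 aperture, OpenCV's Laplacian uses exactly the
            // 4-neighbor kernel defined below, so the absolute difference computed
            // at the end of this test should be zero for every pixel.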

            float[,] k = { { 0,  1, 0 },
                           { 1, -4, 1 },
                           { 0,  1, 0 } };
            ConvolutionKernelF kernel = new ConvolutionKernelF(k);
            Mat convoluted            = new Mat(image.Size, DepthType.Cv8U, 1);

            CvInvoke.Filter2D(image, convoluted, kernel, kernel.Center);

            Mat absDiff = new Mat();

            CvInvoke.AbsDiff(laplacian, convoluted, absDiff);
            int nonZeroPixelCount = CvInvoke.CountNonZero(absDiff);

            EmguAssert.IsTrue(nonZeroPixelCount == 0);

            //Emgu.CV.UI.ImageViewer.Show(absDiff);
        }
Example #12
        public float getRegionSkinPercentage(Image <Gray, Byte> input, float cols, float rows)
        {
            float area     = cols * rows;
            int   white    = CvInvoke.CountNonZero(input);
            float skinPerc = (white / area);

            return(skinPerc);
        }
Example #13
        //public static Highlighter[] HighlightMatches(Rectangle[] Matches)
        //{
        //    var result = new List<Highlighter>();
        //    foreach (var m in Matches)
        //    {
        //        result.Add(new Highlighter(m, System.Drawing.Color.Red));
        //    }
        //    return result.ToArray();
        //}
        //public static void HighlightMatches(Rectangle[] Matches, TimeSpan Duration)
        //{
        //    foreach (var Match in Matches)
        //    {
        //        Task.Factory.StartNew(() =>
        //        {
        //            var h2 = new Highlighter(Match, System.Drawing.Color.Red);
        //            System.Threading.Thread.Sleep(Duration);
        //            h2.remove();
        //        });
        //    }
        //}
        //public static void HighlightMatch(Rectangle Match, bool Blocking, Color Color, TimeSpan Duration)
        //{
        //    if (!Blocking)
        //    {
        //        Task.Factory.StartNew(() =>
        //        {
        //            var h2 = new Highlighter(Match, System.Drawing.Color.Red);
        //            System.Threading.Thread.Sleep(Duration);
        //            h2.remove();
        //            System.Windows.Forms.Application.DoEvents();
        //        });
        //        return;
        //    }
        //    var h = new Highlighter(Match, System.Drawing.Color.Red);
        //    System.Threading.Thread.Sleep(Duration);
        //    h.remove();
        //    System.Windows.Forms.Application.DoEvents();
        //}



        //https://www.meridium.se/sv/blogg/imagematching-using-opencv/
        public static void FindMatch(Mat modelImage, Mat observedImage, out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints, VectorOfVectorOfDMatch matches, out Mat mask, out Mat homography, out long score)
        {
            int    k = 2;
            double uniquenessThreshold = 0.80;

            homography = null;

            modelKeyPoints    = new VectorOfKeyPoint();
            observedKeyPoints = new VectorOfKeyPoint();

            using (UMat uModelImage = modelImage.GetUMat(AccessType.Read))
                using (UMat uObservedImage = observedImage.GetUMat(AccessType.Read))
                {
                    var featureDetector = new Emgu.CV.Features2D.KAZE();

                    Mat modelDescriptors = new Mat();
                    featureDetector.DetectAndCompute(uModelImage, null, modelKeyPoints, modelDescriptors, false);

                    Mat observedDescriptors = new Mat();
                    featureDetector.DetectAndCompute(uObservedImage, null, observedKeyPoints, observedDescriptors, false);

                    // KdTree for faster results / less accuracy
                    using (var ip = new Emgu.CV.Flann.KdTreeIndexParams())
                        using (var sp = new Emgu.CV.Flann.SearchParams())
                            using (var matcher = new Emgu.CV.Features2D.FlannBasedMatcher(ip, sp))
                            {
                                matcher.Add(modelDescriptors);

                                matcher.KnnMatch(observedDescriptors, matches, k, null);
                                mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                                mask.SetTo(new MCvScalar(255));
                                Emgu.CV.Features2D.Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

                                // Calculate score based on matches size
                                // ---------------------------------------------->
                                score = 0;
                                for (int i = 0; i < matches.Size; i++)
                                {
                                    // if (mask.GetData(i)[0] == 0) continue;
                                    foreach (var e in matches[i].ToArray())
                                    {
                                        ++score;
                                    }
                                }
                                // <----------------------------------------------

                                int nonZeroCount = CvInvoke.CountNonZero(mask);
                                if (nonZeroCount >= 4)
                                {
                                    nonZeroCount = Emgu.CV.Features2D.Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, matches, mask, 1.5, 20);
                                    if (nonZeroCount >= 4)
                                    {
                                        homography = Emgu.CV.Features2D.Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, matches, mask, 2);
                                    }
                                }
                            }
                }
        }
Example #14
        public static int FindMatch2(Mat modelImage, TemplateContainer.ImageData template, out long matchTime, out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints, VectorOfVectorOfDMatch matches, out Mat mask, out Mat homography)
        {
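            // NOTE: featureDetector, k, uniquenessThreshold, nonZeroCount and observedDescriptors
            // are not declared in this snippet; they are presumably fields of the containing class.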
            Stopwatch watch;

            homography = null;
            BriefDescriptorExtractor descriptor = new BriefDescriptorExtractor();

            modelKeyPoints    = new VectorOfKeyPoint();
            observedKeyPoints = new VectorOfKeyPoint();

            using (UMat uModelImage = modelImage.GetUMat(AccessType.Read))
            //using (UMat uObservedImage = template.image.Mat.GetUMat(AccessType.Read))
            {
                //extract features from the object image
                Mat modelDescriptors = new Mat();
                featureDetector.DetectAndCompute(uModelImage, null, modelKeyPoints, modelDescriptors, false);

                watch = Stopwatch.StartNew();

                // extract features from the observed image

                //featureDetector.DetectAndCompute(uObservedImage, null, observedKeyPoints, observedDescriptors, false);
                observedKeyPoints   = template.keyPointsSurf;
                observedDescriptors = template.descriptorSurf;
                // Bruteforce, slower but more accurate
                // You can use KDTree for faster matching with slight loss in accuracy
                using (Emgu.CV.Flann.LinearIndexParams ip = new Emgu.CV.Flann.LinearIndexParams())
                    using (Emgu.CV.Flann.SearchParams sp = new SearchParams())
                        using (DescriptorMatcher matcher = new FlannBasedMatcher(ip, sp))
                        {
                            matcher.Add(modelDescriptors);

                            matcher.KnnMatch(observedDescriptors, matches, k, null);
                            mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                            mask.SetTo(new MCvScalar(255));
                            Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

                            nonZeroCount = CvInvoke.CountNonZero(mask);
                            if (nonZeroCount >= 10)
                            {
                                nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints,
                                                                                           matches, mask, 1.8, 18);
                                if (nonZeroCount >= 12)
                                {
                                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints,
                                                                                                          observedKeyPoints, matches, mask, 2);
                                }
                            }
                        }
                watch.Stop();
            }
            matchTime = watch.ElapsedMilliseconds;
            return(nonZeroCount);
        }
Example #15
        private float ImageWhiteRatio(Mat thresImage)
        {
            if (thresImage.Depth != DepthType.Cv8U)
            {
                throw new Exception("Изображение должно быть черно-белым");
            }
            int   whiteCount      = CvInvoke.CountNonZero(thresImage);
            int   imagePixelCount = thresImage.Rows * thresImage.Cols;
            float whiteRatio      = (float)(whiteCount) / (float)imagePixelCount;

            return(whiteRatio);
        }
        public static List <ImageSearchResult> SearchImageForObjects(WorldObject modelObject, string imageToSearch)
        {
            int    k = 2;
            double uniquenessThreshold = 0.8;
            double hessianThresh       = 300;

            int nonZeroThreshold = 10;

            ObjectFeatures targetImageFeatures = DetectFeatures_Brisk(imageToSearch);

            Mat mask;

            List <ImageSearchResult> searchResults = new List <ImageSearchResult>();

            foreach (ObjectView view in modelObject.Views)
            {
                if (view == null)
                {
                    continue;
                }

                VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch();

                BFMatcher matcher = new BFMatcher(DistanceType.L2);
                matcher.Add(view.Features.Descriptors);

                matcher.KnnMatch(targetImageFeatures.Descriptors, matches, 2, null);

                mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);

                mask.SetTo(new MCvScalar(255));

                Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

                int nonZeroCount = CvInvoke.CountNonZero(mask);

                if (nonZeroCount >= nonZeroThreshold)
                {
                    nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(view.Features.KeyPoints,
                                                                               targetImageFeatures.KeyPoints, matches, mask, 1.5, 20);

                    if (nonZeroCount >= nonZeroThreshold)
                    {
                        Mat homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(view.Features.KeyPoints,
                                                                                                  targetImageFeatures.KeyPoints, matches, mask, 2);

                        searchResults.Add(new ImageSearchResult(view, homography, matches, targetImageFeatures, mask));
                    }
                }
            }

            return(searchResults);
        }
Example #17
        public static Bitmap Skelatanize(Bitmap image2)
        {
            Bitmap image = new Bitmap(image2);

            for (int y = 0; (y <= (image.Height - 1)); y++)
            {
                for (int x = 0; (x <= (image.Width - 1)); x++)
                {
                    Color inv = image.GetPixel(x, y);
                    inv = Color.FromArgb(255, (255 - inv.R), (255 - inv.G), (255 - inv.B));
                    image.SetPixel(x, y, inv);
                }
            }

            Image <Gray, byte> imgOld = new Image <Gray, byte>(image);
            Image <Gray, byte> img2   = (new Image <Gray, byte>(imgOld.Width, imgOld.Height, new Gray(255))).Sub(imgOld);
            Image <Gray, byte> eroded = new Image <Gray, byte>(img2.Size);
            Image <Gray, byte> temp   = new Image <Gray, byte>(img2.Size);
            Image <Gray, byte> skel   = new Image <Gray, byte>(img2.Size);

            skel.SetValue(0);
            CvInvoke.Threshold(img2, img2, 127, 255, 0);
            var  element = CvInvoke.GetStructuringElement(ElementShape.Cross, new Size(3, 3), new Point(-1, -1));
            bool done    = false;

            while (!done)
            {
                CvInvoke.Erode(img2, eroded, element, new Point(-1, -1), 1, BorderType.Reflect, default(MCvScalar));
                CvInvoke.Dilate(eroded, temp, element, new Point(-1, -1), 1, BorderType.Reflect, default(MCvScalar));
                CvInvoke.Subtract(img2, temp, temp);
                CvInvoke.BitwiseOr(skel, temp, skel);
                eroded.CopyTo(img2);
                if (CvInvoke.CountNonZero(img2) == 0)
                {
                    done = true;
                }
            }

            Bitmap image3 = new Bitmap(skel.Bitmap);
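            // NOTE: the loop below re-inverts the local working copy 'image' only;
            // it has no effect on the returned skeleton bitmap 'image3' created above.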

            for (int y = 0; (y <= (image.Height - 1)); y++)
            {
                for (int x = 0; (x <= (image.Width - 1)); x++)
                {
                    Color inv = image.GetPixel(x, y);
                    inv = Color.FromArgb(255, (255 - inv.R), (255 - inv.G), (255 - inv.B));
                    image.SetPixel(x, y, inv);
                }
            }


            return(image3);
        }
Example #18
        public float getImageSkinPercentage(Image <Gray, Byte> input)
        {
            float area = input.Cols * input.Rows;

            int   white    = CvInvoke.CountNonZero(input);
            float skinPerc = (white / area);

            if (float.IsNaN(skinPerc))
            {
                skinPerc = 0;
            }
            return(skinPerc);
        }
Example #19
        /// <summary>
        /// Matches the observed key points against the model key points and, if a valid
        /// homography is found, returns the model outline projected into the observed image.
        /// </summary>
        /// <param name="observedKeyPoints">Key points detected in the observed image</param>
        /// <param name="modelKeyPoints">Key points detected in the model image</param>
        /// <returns>The projected model outline, or an empty vector if no match was found</returns>
        public static VectorOfPoint Matching(VectorOfKeyPoint observedKeyPoints, VectorOfKeyPoint modelKeyPoints, Mat observedDescriptors, Mat modelDescriptors, Size modelSize)
        {
            KAZE featureDetector = new KAZE();

            using (var ip = new LinearIndexParams())
                using (var sp = new SearchParams())
                    using (var matcher = new FlannBasedMatcher(ip, sp))
                        using (var matches = new VectorOfVectorOfDMatch())
                        {
                            Mat homography = new Mat();
                            matcher.Add(modelDescriptors);

                            matcher.KnnMatch(observedDescriptors, matches, 2, null);
                            var mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                            mask.SetTo(new MCvScalar(255));
                            Features2DToolbox.VoteForUniqueness(matches, 0.80, mask);

                            int nonZeroCount = CvInvoke.CountNonZero(mask);
                            if (nonZeroCount >= 4)
                            {
                                nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, matches, mask, 1.5, 20);
                                if (nonZeroCount >= 4)
                                {
                                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, matches, mask, 2);
                                }
                            }

                            if (homography != null && !homography.Size.IsEmpty)
                            {
                                Rectangle rect = new Rectangle(Point.Empty, modelSize);
                                PointF[]  pts  = new PointF[]
                                {
                                    new PointF(rect.Left, rect.Bottom),
                                    new PointF(rect.Right, rect.Bottom),
                                    new PointF(rect.Right, rect.Top),
                                    new PointF(rect.Left, rect.Top)
                                };

                                pts = CvInvoke.PerspectiveTransform(pts, homography);

                                Point[] points = Array.ConvertAll <PointF, Point>(pts, Point.Round);

                                return(new VectorOfPoint(points));
                            }
                            else
                            {
                                return(new VectorOfPoint());
                            }
                        }
        }
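If Matching returns a non-empty quadrilateral, it can be drawn directly onto the observed image (a sketch; the key point, descriptor, and image variables are illustrative):

        using (VectorOfPoint outline = Matching(observedKp, modelKp, observedDesc, modelDesc, modelImage.Size))
        {
            if (outline.Size > 0)
            {
                CvInvoke.Polylines(observedImage, outline, true, new MCvScalar(0, 255, 0), 2);
            }
        }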
Example #20
        private static bool IntersectsContour(Point[] Cnt1, Point[] Cnt2, Rectangle BoudingCnt1)
        {
            bool intersect    = false;
            bool canIntersect = false;

            Point[] cntPTranform1 = new Point[Cnt1.Length];
            Point[] cntPTranform2 = new Point[Cnt2.Length];
            for (int i = 0; i < Cnt1.Length; i++)
            {
                cntPTranform1[i] = new Point(Cnt1[i].X - BoudingCnt1.X, Cnt1[i].Y - BoudingCnt1.Y);
            }
            for (int i = 0; i < Cnt2.Length; i++)
            {
                cntPTranform2[i] = new Point(Cnt2[i].X - BoudingCnt1.X, Cnt2[i].Y - BoudingCnt1.Y);
                if ((cntPTranform2[i].X > 0 && cntPTranform2[i].X < BoudingCnt1.Width) ||
                    (cntPTranform2[i].Y > 0 && cntPTranform2[i].Y < BoudingCnt1.Height))
                {
                    canIntersect = true;
                }
            }
            if (canIntersect)
            {
                using (VectorOfPoint cnt1 = new VectorOfPoint(cntPTranform1))
                    using (VectorOfPoint cnt2 = new VectorOfPoint(cntPTranform2))
                        using (VectorOfVectorOfPoint contour1 = new VectorOfVectorOfPoint())
                            using (VectorOfVectorOfPoint contour2 = new VectorOfVectorOfPoint())
                                using (Image <Gray, byte> image1 = new Image <Gray, byte>(BoudingCnt1.Size))
                                    using (Image <Gray, byte> image2 = new Image <Gray, byte>(BoudingCnt1.Size))
                                        using (Image <Gray, byte> imageAnd = new Image <Gray, byte>(BoudingCnt1.Size))
                                        {
                                            contour1.Push(cnt1);
                                            contour2.Push(cnt2);
                                            CvInvoke.DrawContours(image1, contour1, -1, new MCvScalar(255), -1);
                                            CvInvoke.DrawContours(image2, contour2, -1, new MCvScalar(255), -1);
                                            CvInvoke.BitwiseAnd(image1, image2, imageAnd);
                                            int count = CvInvoke.CountNonZero(imageAnd);
                                            if (count > 0)
                                            {
                                                intersect = true;
                                            }
                                            //CvInvoke.Imshow("image1", image1);
                                            //CvInvoke.Imshow("image2", image2);
                                            //CvInvoke.Imshow("imageAnd", imageAnd);
                                            //CvInvoke.WaitKey(0);
                                            //Console.WriteLine("go");
                                        }
            }
            return(intersect);
        }
Example #21
        /// <summary>
        ///
        ///
        /// TODO: thresholds must be set
        /// </summary>
        /// <param name="model"></param>
        /// <param name="imageToSearch"></param>
        /// <returns></returns>
        public static bool SearchImageForObjects(List <ObjectFeatures> model, Bitmap image)
        {
            int    k = 2;
            double uniquenessThreshold = 0.8;
            double hessianThresh       = 300;

            ObjectFeatures targetImageFeatures = DetectFeatures(image);

            Mat mask;

            VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch();

            foreach (ObjectFeatures of in model)
            {
                // Use a fresh matcher per view: BFMatcher.Add accumulates descriptors,
                // so a single shared matcher would match against every view added so far.
                BFMatcher matcher = new BFMatcher(DistanceType.L2);
                matcher.Add(of.Descriptors);

                matcher.KnnMatch(targetImageFeatures.Descriptors, matches, 2, null);

                mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);

                mask.SetTo(new MCvScalar(255));

                Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

                int nonZeroCount = CvInvoke.CountNonZero(mask);

                if (nonZeroCount >= 4)
                {
                    nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(of.KeyPoints,
                                                                               targetImageFeatures.KeyPoints, matches, mask, 1.5, 20);

                    if (nonZeroCount >= 4)
                    {
                        return(true);

                        /*
                         * Mat homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(view.Features.KeyPoints,
                         *  targetImageFeatures.KeyPoints, matches, mask, 2);
                         *
                         * searchResults.Add(new ImageSearchResult(view, homography, matches));
                         */
                    }
                }
            }

            return(false);
        }
Example #22
File: SURF.cs Project: okeanz/IPS
        public static void FindMatchWM(Mat modelImage, Mat observedImage, out long matchTime, out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints, VectorOfVectorOfDMatch matches, out Mat mask, out Mat homography, Feature2D computer, Feature2D detector)
        {
            Stopwatch watch;

            modelKeyPoints    = new VectorOfKeyPoint(); // key points of the model image
            observedKeyPoints = new VectorOfKeyPoint(); // key points of the larger (observed) image
            homography        = null;
            int k = 2;


            using (Mat uModelImage = modelImage.Clone())
                using (Mat uObservedImage = observedImage.Clone())
                {
                    // get the descriptors from the first (model) image
                    Mat modelDescriptors = new Mat();
                    DetectAndCompute(uModelImage, out modelKeyPoints, out modelDescriptors, detector, computer);

                    watch = Stopwatch.StartNew();

                    // ... and from the second (observed) image
                    Mat observedDescriptors = new Mat();
                    DetectAndCompute(uObservedImage, out observedKeyPoints, out observedDescriptors, detector, computer);


                    BFMatcher matcher = new BFMatcher(DistanceType.L2); // matcher that compares descriptors of the two images
                    matcher.Add(modelDescriptors);

                    matcher.KnnMatch(observedDescriptors, matches, k, null); // perform the matching
                    mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                    mask.SetTo(new MCvScalar(255));
                    Features2DToolbox.VoteForUniqueness(matches, 0.8, mask); // build the mask (see below)

                    int nonZeroCount = CvInvoke.CountNonZero(mask);
                    if (nonZeroCount >= 4)
                    {
                        nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints,
                                                                                   matches, mask, 1.5, 20);
                        if (nonZeroCount >= 4)
                        {
                            homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, // estimate the region where the model should appear
                                                                                                  observedKeyPoints, matches, mask, 2);
                        }
                    }

                    watch.Stop();
                }
            matchTime = watch.ElapsedMilliseconds;
        }
Example #23
        public void FindMatch(Image <Gray, byte> modelImage, Image <Gray, byte> observedImage, double hessianThresh, int k,
                              double uniquenessThreshold, VectorOfVectorOfDMatch matches, out VectorOfKeyPoint modelKeyPoints,
                              out VectorOfKeyPoint observedKeyPoints, out Mat mask, out Mat homography)
        {
            homography        = null;
            modelKeyPoints    = new VectorOfKeyPoint();
            observedKeyPoints = new VectorOfKeyPoint();

            CudaSURFDetector surfCuda = new CudaSURFDetector((float)hessianThresh);

            using (GpuMat gpuModelImage = new GpuMat(modelImage))
                //extract features from the object image
                using (GpuMat gpuModelKeyPoints = surfCuda.DetectKeyPointsRaw(gpuModelImage, null))
                    using (
                        GpuMat gpuModelDescriptors = surfCuda.ComputeDescriptorsRaw(gpuModelImage, null, gpuModelKeyPoints))
                        using (CudaBFMatcher matcher = new CudaBFMatcher(DistanceType.L2))
                        {
                            surfCuda.DownloadKeypoints(gpuModelKeyPoints, modelKeyPoints);

                            // extract features from the observed image
                            using (GpuMat gpuObservedImage = new GpuMat(observedImage))
                                using (GpuMat gpuObservedKeyPoints = surfCuda.DetectKeyPointsRaw(gpuObservedImage, null))
                                    using (
                                        GpuMat gpuObservedDescriptors = surfCuda.ComputeDescriptorsRaw(gpuObservedImage, null,
                                                                                                       gpuObservedKeyPoints))
                                    {
                                        matcher.KnnMatch(gpuObservedDescriptors, gpuModelDescriptors, matches, k);

                                        surfCuda.DownloadKeypoints(gpuObservedKeyPoints, observedKeyPoints);

                                        mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                                        mask.SetTo(new MCvScalar(255));
                                        Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

                                        int nonZeroCount = CvInvoke.CountNonZero(mask);
                                        if (nonZeroCount >= 4)
                                        {
                                            nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints,
                                                                                                       matches, mask, 1.5, 20);
                                            if (nonZeroCount >= 4)
                                            {
                                                homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints,
                                                                                                                      observedKeyPoints, matches, mask, 2);
                                            }
                                        }
                                    }
                        }
        }
        public static void FindMatch(string pageFile, string templateFile)
        {
            Image <Rgb, byte> page     = getPreprocessedImage(pageFile);
            Image <Rgb, byte> template = getPreprocessedImage(templateFile);

            var detector = new ORBDetector();
            VectorOfKeyPoint templateKeyPoints = new VectorOfKeyPoint();
            Mat templateDescriptors            = new Mat();

            detector.DetectAndCompute(template, null, templateKeyPoints, templateDescriptors, false);

            VectorOfKeyPoint pageKeyPoints = new VectorOfKeyPoint();
            Mat pageDescriptors            = new Mat();

            detector.DetectAndCompute(page, null, pageKeyPoints, pageDescriptors, false);
            using (var matcher = new BFMatcher(DistanceType.L1))
            {
                matcher.Add(templateDescriptors);
                VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch();

                //VectorOfDMatch matches2 = new VectorOfDMatch();
                //matcher.Match(pageDescriptors, matches2);


                matcher.KnnMatch(pageDescriptors, matches, 2, null);

                Mat mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                mask.SetTo(new MCvScalar(255));
                Features2DToolbox.VoteForUniqueness(matches, 0.8, mask);
                Mat homography   = new Mat();
                int nonZeroCount = CvInvoke.CountNonZero(mask);
                if (nonZeroCount >= 4)
                {
                    nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(templateKeyPoints, pageKeyPoints, matches, mask, 1.5, 20);
                    if (nonZeroCount >= 4)
                    {
                        homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(templateKeyPoints, pageKeyPoints, matches, mask, 2);
                    }
                }

                Mat result = new Mat();
                Features2DToolbox.DrawMatches(template, templateKeyPoints, page, pageKeyPoints, matches, result, new MCvScalar(0, 255, 0), new MCvScalar(255, 0, 0), mask, Features2DToolbox.KeypointDrawType.NotDrawSinglePoints);

                //Features2DToolbox.DrawMatches(template, templateKeyPoints, page, pageKeyPoints, matches2, result, new MCvScalar(0, 255, 0), new MCvScalar(255, 0, 0), null, Features2DToolbox.KeypointDrawType.NotDrawSinglePoints);

                MainForm.This.PageBox.Image = result.ToBitmap();
            }
        }
Example #25
        public void TestMatrixSubtract()
        {
            Matrix <float> f = new Matrix <float>(600, 480);

            //set the value to 300
            f.SetValue(new MCvScalar(300));
            f -= 10;
            using (ScalarArray sa = new ScalarArray(290))
                using (Mat absDiff = new Mat())
                {
                    //calculate the different of the value in f mat with 290
                    CvInvoke.AbsDiff(f, sa, absDiff);
                    int nonZeroCount = CvInvoke.CountNonZero(absDiff);
                    //Console.WriteLine(String.Format("number of elements that is not 290: {0}", nonZeroCount));
                    // Every element should now equal 290, so the difference is all zeros.
                    EmguAssert.IsTrue(nonZeroCount == 0);
                }
        }
Example #26
        public IDrawer FindMatch(KeyFrame keyFrame, Image <Bgr, Byte> observedImage, List <KeyFrame> keyframes = null)
        {
            if (keyFrame.KeyPoints == null)
            {
                keyFrame.KeyPoints = new VectorOfKeyPoint(CPU.Detect(keyFrame.Frame));
            }
            if (keyFrame.Descriptors == null)
            {
                keyFrame.Descriptors = new Mat();
                descriptor.Compute(keyFrame.Frame, keyFrame.KeyPoints, keyFrame.Descriptors);
            }
            // extract features from the observed image
            observedKeyPoints = new VectorOfKeyPoint(CPU.Detect(observedImage));
            descriptor.Compute(observedImage, observedKeyPoints, observedDescriptors);
            matcher.Add(keyFrame.Descriptors);

            matcher.KnnMatch(observedDescriptors, matches, k, null);
            mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
            mask.SetTo(new MCvScalar(255));
            Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

            int nonZeroCount = CvInvoke.CountNonZero(mask);

            if (nonZeroCount >= 4)
            {
                nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(keyFrame.KeyPoints, observedKeyPoints,
                                                                           matches, mask, 1.5, 20);
                if (nonZeroCount >= 4)
                {
                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(keyFrame.KeyPoints, observedKeyPoints, matches, mask, 2);
                }

                nonZeroCount = CvInvoke.CountNonZero(mask);

                if (nonZeroCount < 9)
                {
                    homography = null;
                }

                //if (keyframes != null && homography == null)
                //    keyframes.Add(new KeyFrame() { Frame = observedImage, KeyPoints = observedKeyPoints, Descriptors = observedDescriptors });
            }

            return(this);
        }
Example #27
        protected Mat GetHomography(VectorOfKeyPoint keyPoints1, VectorOfKeyPoint keyPoints2, VectorOfVectorOfDMatch matches, Mat mask)
        {
            Mat homography = null;

            mask.SetTo(new MCvScalar(255));
            Features2DToolbox.VoteForUniqueness(matches, 0.9, mask);
            int i = CvInvoke.CountNonZero(mask);

            if (i >= 4)
            {
                i = Features2DToolbox.VoteForSizeAndOrientation(keyPoints1, keyPoints2, matches, mask, 1.5, 20);
                if (i >= 4)
                {
                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(keyPoints1, keyPoints2, matches, mask, 2);
                }
            }
            return(homography);
        }
Example #28
        private static Mat FindMatchWithoutCuda(Mat modelImage, Mat observedImage, VectorOfKeyPoint modelKeyPoints, VectorOfKeyPoint observedKeyPoints, VectorOfVectorOfDMatch matches, out Mat mask, Mat homography, int k, double uniquenessThreshold, double hessianThresh)
        {
            using (UMat uModelImage = modelImage.GetUMat(AccessType.Read))
                using (UMat uObservedImage = observedImage.GetUMat(AccessType.Read))
                {
                    SURF surfCPU = new SURF(hessianThresh, upright: true);

                    UMat modelDescriptors;
                    if (!FindDescriptors(surfCPU, modelKeyPoints, uModelImage, out modelDescriptors))
                    {
                        Logger.Log(LogType.Error, "Feature Descriptor for Model image is empty. Is the image too small?");
                        return(mask = null);
                    }

                    UMat observedDescriptors;
                    if (!FindDescriptors(surfCPU, observedKeyPoints, uObservedImage, out observedDescriptors))
                    {
                        Logger.Log(LogType.Error, "Feature Descriptor for Observed image is empty. Is the image too small?");
                        return(mask = null);
                    }

                    BFMatcher matcher = new BFMatcher(DistanceType.L2);
                    matcher.Add(modelDescriptors);
                    matcher.KnnMatch(observedDescriptors, matches, k, null);

                    mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                    mask.SetTo(new MCvScalar(255));
                    Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

                    int nonZeroCount = CvInvoke.CountNonZero(mask);
                    if (nonZeroCount >= 4)
                    {
                        nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, matches, mask, 1.5, 20);
                        if (nonZeroCount >= 4)
                        {
                            homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, matches, mask, 2);
                        }
                    }
                }

            return(homography);
        }
        /// <summary>
        /// Find wrinkles on an image within a rectangle
        /// </summary>
        /// <param name="shortImage">Image on which wrinkles are to be calculated</param>
        /// <param name="rect">Rectangle within which wrinkles are calculated</param>
        /// <returns>Returns the intensity of wrinkles (the count of Canny edge pixels)</returns>
        public double findWrinkles(Mat shortImage, Rectangle rect)
        {
            Image <Gray, byte> procImage = shortImage.ToImage <Gray, byte>();

            procImage.ROI = rect;

            float[]        GrayHist;
            DenseHistogram Histo = new DenseHistogram(255, new RangeF(0, 255));

            Histo.Calculate(new Image <Gray, byte>[] { procImage }, true, null);

            //// The histogram data is copied out here
            GrayHist = new float[256];
            Histo.CopyTo <float>(GrayHist);
            //// Calculate the threshold from the image histogram
            int threashHold = calculate_threashHold(GrayHist);

            procImage = procImage.Canny(threashHold, threashHold);
            return((double)CvInvoke.CountNonZero(procImage));
        }
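A caller sketch (the rectangle is an illustrative face region; the returned score is the number of Canny edge pixels inside it):

        Rectangle foreheadRegion = new Rectangle(40, 20, 120, 60);
        double wrinkleScore = findWrinkles(faceFrame, foreheadRegion);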
Example #30
        public void FindMatches(Image <Rgb, byte> SubMap, out VectorOfKeyPoint VectorSubMapKeyPoint,
                                out Mat SubMapDiscriptors, out VectorOfVectorOfDMatch matches,
                                out Mat mask, out System.Drawing.Rectangle zone, out Mat homography, int k, double uniquenessThreshold, SIFTParametrs parametrs)
        {
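            // NOTE: MapDiscriptors and VectorMapKeyPoint are presumably class-level fields
            // holding the key points and descriptors of the full map.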
            VectorSubMapKeyPoint = new VectorOfKeyPoint();
            SubMapDiscriptors    = new Mat();
            matches = new VectorOfVectorOfDMatch();
            zone    = new System.Drawing.Rectangle();
            using (SIFT siftCPU = new SIFT(parametrs.nFeatures, parametrs.nOctaveLayers,
                                           parametrs.contrastThreshold, parametrs.edgeThreshold, parametrs.sigma))
            {
                siftCPU.DetectAndCompute(SubMap, null, VectorSubMapKeyPoint, SubMapDiscriptors, false);
            }
            using (Emgu.CV.Flann.LinearIndexParams ip = new Emgu.CV.Flann.LinearIndexParams())
                using (Emgu.CV.Flann.SearchParams sp = new SearchParams())
                    using (Emgu.CV.Features2D.DescriptorMatcher matcher = new FlannBasedMatcher(ip, sp))
                    {
                        matcher.Add(SubMapDiscriptors);
                        matcher.KnnMatch(MapDiscriptors, matches, k, null);
                    }

            mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
            mask.SetTo(new MCvScalar(255));
            Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

            homography = null;

            int nonZeroCount = CvInvoke.CountNonZero(mask);

            if (nonZeroCount >= 4)
            {
                nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(VectorSubMapKeyPoint, VectorMapKeyPoint,
                                                                           matches, mask, 1.5, 20);
                if (nonZeroCount >= 4)
                {
                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(
                        VectorSubMapKeyPoint, VectorMapKeyPoint, matches, mask, 2);
                }
            }
        }