Example #1
 private extern static void CvSURFDetectorComputeDescriptors(
    ref SURFDetector detector,
    IntPtr image,
    IntPtr mask,
    IntPtr keypoints,
    int numberOfKeyPoints,
    IntPtr descriptors);
Example #2
 private extern static void CvSURFDetectorComputeDescriptors(
     ref SURFDetector detector,
     IntPtr image,
     IntPtr mask,
     IntPtr keypoints,
     int numberOfKeyPoints,
     IntPtr descriptors);
Example #3
 public static extern void cvExtractSURF(
    IntPtr image, IntPtr mask,
    ref IntPtr keypoints,
    ref IntPtr descriptors,
    IntPtr storage,
    SURFDetector parameters,
    int useProvidedKeyPoints);
Example #4
        public List<VectorOfKeyPoint> SURF_BruteForceMatcher(Image<Gray, byte> model, Image<Gray, byte> observed, int hessianThreshold, out SURFDetector surfCPU)
        {
            surfCPU = new SURFDetector(hessianThreshold, false);
            List<VectorOfKeyPoint> KeyPointsList = new List<VectorOfKeyPoint>();
            VectorOfKeyPoint modelKeyPoints;
            VectorOfKeyPoint observedKeyPoints;
            try
            {

                modelKeyPoints = surfCPU.DetectKeyPointsRaw(model, null); // Extract features from the object image
                observedKeyPoints = surfCPU.DetectKeyPointsRaw(observed, null); // Extract features from the observed image

                if (modelKeyPoints.Size <= 0)
                    throw new System.ArgumentException("Can't find any keypoints in your model image!");

                KeyPointsList.Add(modelKeyPoints);
                KeyPointsList.Add(observedKeyPoints);

            }
            catch (Exception e)
            {
                Log.WriteLine("SURF_BruteForceMatcher: " + e.Message);
                Console.WriteLine(e.Message);
                throw; //rethrow, preserving the original stack trace
            }
            return KeyPointsList;
        }
Example #5
 public SurfRecognizer(Image<Gray, Byte> modelImage)
 {
     surfCPU = new SURFDetector(HessianThreshold, extendedflag);
     this.modelImage = modelImage;
     modelKeyPoints = new VectorOfKeyPoint();
     Matrix<float> modelDescriptors = surfCPU.DetectAndCompute(modelImage, null, modelKeyPoints); // extract information from the model image
 }
Example #6
 public SignDetector(Image<Bgr, Byte> stopSignModel)
 {
     _detector2 = new SURFDetector(500, false);
     using (Image<Gray, Byte> redMask = GetColorPixelMask(stopSignModel))
     {
         try
         {
             _tracker2 = new Features2DTracker<float>(_detector2.DetectFeatures(redMask, null));
         }
         catch { }
     }
     _octagonStorage2 = new MemStorage();
     _octagon2 = new Contour<Point>(_octagonStorage2);
     _octagon2.PushMulti(new Point[] {
         //hexagon
         new Point(1, 0),
         new Point(2, 0),
         new Point(3, 1),
         new Point(2, 2),
         new Point(1, 2),
         new Point(0, 1)},
         //octagon (alternative shape, left commented out)
         //new Point(1, 0),
         //new Point(2, 0),
         //new Point(3, 1),
         //new Point(3, 2),
         //new Point(2, 3),
         //new Point(1, 3),
         //new Point(0, 2),
         //new Point(0, 1)},
         Emgu.CV.CvEnum.BACK_OR_FRONT.FRONT);
 }
Example #7
        /// <summary>
        /// Create a MCvSURFParams using the specific values
        /// </summary>
        /// <param name="hessianThresh">
        /// Only features with keypoint.hessian larger than that are extracted.
        /// good default value is ~300-500 (can depend on the average local contrast and sharpness of the image).
        /// user can further filter out some features based on their hessian values and other characteristics
        /// </param>
        /// <param name="extendedFlag">
        /// false means basic descriptors (64 elements each),
        /// true means extended descriptors (128 elements each)
        /// </param>
        public SURFDetector(double hessianThresh, bool extendedFlag)
        {
            SURFDetector p = CvInvoke.cvSURFParams(hessianThresh, extendedFlag ? 1 : 0);

            extended         = p.extended;
            hessianThreshold = p.hessianThreshold;
            nOctaves         = p.nOctaves;
            nOctaveLayers    = p.nOctaveLayers;
        }
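
For orientation, here is a minimal usage sketch assembled from calls that appear in the other examples on this page (the image path is hypothetical; the threshold and flag follow the defaults documented above):

        SURFDetector surf = new SURFDetector(400, false); //basic 64-element descriptors
        using (Image<Gray, Byte> img = new Image<Gray, byte>("model.png")) //hypothetical path
        {
            //detect keypoints, then compute one descriptor row per keypoint
            VectorOfKeyPoint keyPoints = surf.DetectKeyPointsRaw(img, null);
            Matrix<float> descriptors = surf.ComputeDescriptorsRaw(img, null, keyPoints);
        }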
Example #8
 private void CreateSurfaceTracker()
 {
     surfaceParameters = new SURFDetector(500, false);
     using (Image<Bgr, Byte> stopSignModel = new Image<Bgr, Byte>(Properties.Resources.SignModel))
     using (Image<Gray, Byte> redMask = GetRedPixelMask(stopSignModel))
     {
         featureTracker = new Features2DTracker(surfaceParameters.DetectFeatures(redMask, null));
     }
 }
Example #9
        static void Run()
        {
            SURFDetector surfParam = new SURFDetector(350, false);

             Image<Gray, Byte> modelImage = new Image<Gray, byte>("box.png");
             //extract features from the object image
             ImageFeature[] modelFeatures = surfParam.DetectFeatures(modelImage, null);

             //Create a Feature Tracker
             Features2DTracker tracker = new Features2DTracker(modelFeatures);

             Image<Gray, Byte> observedImage = new Image<Gray, byte>("box_in_scene.png");

             Stopwatch watch = Stopwatch.StartNew();
             // extract features from the observed image
             ImageFeature[] imageFeatures = surfParam.DetectFeatures(observedImage, null);

             Features2DTracker.MatchedImageFeature[] matchedFeatures = tracker.MatchFeature(imageFeatures, 2, 20);
             matchedFeatures = Features2DTracker.VoteForUniqueness(matchedFeatures, 0.8);
             matchedFeatures = Features2DTracker.VoteForSizeAndOrientation(matchedFeatures, 1.5, 20);
             HomographyMatrix homography = Features2DTracker.GetHomographyMatrixFromMatchedFeatures(matchedFeatures);
             watch.Stop();

             //Merge the object image and the observed image into one image for display
             Image<Gray, Byte> res = modelImage.ConcateVertical(observedImage);

             #region draw lines between the matched features
             foreach (Features2DTracker.MatchedImageFeature matchedFeature in matchedFeatures)
             {
            PointF p = matchedFeature.ObservedFeature.KeyPoint.Point;
            p.Y += modelImage.Height;
            res.Draw(new LineSegment2DF(matchedFeature.SimilarFeatures[0].Feature.KeyPoint.Point, p), new Gray(0), 1);
             }
             #endregion

             #region draw the project region on the image
             if (homography != null)
             {  //draw a rectangle along the projected model
            Rectangle rect = modelImage.ROI;
            PointF[] pts = new PointF[] {
               new PointF(rect.Left, rect.Bottom),
               new PointF(rect.Right, rect.Bottom),
               new PointF(rect.Right, rect.Top),
               new PointF(rect.Left, rect.Top)};
            homography.ProjectPoints(pts);

            for (int i = 0; i < pts.Length; i++)
               pts[i].Y += modelImage.Height;

            res.DrawPolyline(Array.ConvertAll<PointF, Point>(pts, Point.Round), true, new Gray(255.0), 5);
             }
             #endregion

             ImageViewer.Show(res, String.Format("Matched in {0} milliseconds", watch.ElapsedMilliseconds));
        }
Example #10
        public SURFEngine(Emgu.CV.Image<Gray, byte> roi)
        {
            surfDetector = new SURFDetector(500, false);
            itemImage = roi;

            itemKP = surfDetector.DetectKeyPointsRaw(itemImage, null);
            itemDescriptors = surfDetector.ComputeDescriptorsRaw(itemImage, null, itemKP);

            matcher = new BruteForceMatcher<float>(DistanceType.L2);
            matcher.Add(itemDescriptors);
        }
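
The matcher above is primed with the item descriptors; a query against a scene would mirror the KnnMatch pattern used in the later examples on this page. A sketch, where sceneDescriptors is a hypothetical Matrix<float> produced by ComputeDescriptorsRaw on a scene image:

        int k = 2;
        Matrix<int> indices = new Matrix<int>(sceneDescriptors.Rows, k);  //one row per scene descriptor, one column per neighbor
        Matrix<float> dist = new Matrix<float>(sceneDescriptors.Rows, k); //distances to the k nearest item descriptors
        matcher.KnnMatch(sceneDescriptors, indices, dist, k, null);       //2-nearest-neighbor search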
Example #11
        public List<Keypoint> usingSurf(Bitmap image)
        {
            SURFDetector surf = new SURFDetector(750, false);
            Image<Gray, Byte> modelImage = new Image<Gray, byte>(new Bitmap(image));
            VectorOfKeyPoint modelKeyPoints = surf.DetectKeyPointsRaw(modelImage, null);
            MKeyPoint[] keypoints = modelKeyPoints.ToArray();

            Keypoint key;
            List<Keypoint> keypointsList = new List<Keypoint>();
            foreach (MKeyPoint keypoint in keypoints)
            {
                key = new Keypoint(keypoint.Point.X, keypoint.Point.Y, keypoint.Size);
                keypointsList.Add(key);
            }

            return keypointsList;
        }
Example #12
        public ImageDetector(Image<Gray, Byte> imgModel)
        {
            _detector = new SURFDetector(500, false);
            ImageFeature<float>[] features = _detector.DetectFeatures(imgModel, null);
            if (features.Length == 0)
                throw new Exception("No image feature has been found in the image model");
            _tracker = new Features2DTracker<float>(features);

            _octagonStorage = new MemStorage();
            _octagon = new Contour<Point>(_octagonStorage);
            _octagon.PushMulti(new Point[] {
            new Point(1, 0),
            new Point(2, 0),
            new Point(3, 1),
            new Point(3, 2),
            new Point(2, 3),
            new Point(1, 3),
            new Point(0, 2),
            new Point(0, 1)},
               Emgu.CV.CvEnum.BACK_OR_FRONT.FRONT);
        }
Example #13
 public StopSignDetector(Image<Bgr, Byte> stopSignModel)
 {
     _detector = new SURFDetector(500, false);
     using (Image<Gray, Byte> redMask = GetRedPixelMask(stopSignModel))
     {
         ImageFeature<float>[] temp = _detector.DetectFeatures(redMask, null);
         _tracker = new Features2DTracker<float>(temp);
     }
     _octagonStorage = new MemStorage();
     _octagon = new Contour<Point>(_octagonStorage);
     _octagon.PushMulti(new Point[] {
         new Point(1, 0),
         new Point(2, 0),
         new Point(3, 1),
         new Point(3, 2),
         new Point(2, 3),
         new Point(1, 3),
         new Point(0, 2),
         new Point(0, 1)},
         Emgu.CV.CvEnum.BACK_OR_FRONT.FRONT
     );
 }
Example #14
        public List<ImageRecord> QueryImage(string queryImagePath,  SurfSettings surfSetting = null)
        {
            List<ImageRecord> rtnImageList = new List<ImageRecord>();

            var observerFeatureSets = SurfRepository.GetSurfRecordList();

            #region SURF Detector Setup
            double hessianThresh = 500;
            double uniquenessThreshold = 0.8;
            int minGoodMatchPercent = 50;

            if (surfSetting != null)
            {
                hessianThresh = surfSetting.HessianThresh.Value;
                uniquenessThreshold = surfSetting.UniquenessThreshold.Value;
                minGoodMatchPercent = surfSetting.GoodMatchThreshold.Value;
            }

            SURFDetector surfDetector = new SURFDetector(hessianThresh, false);
            #endregion

            using (Image<Gray, byte> modelImage = new Image<Gray, byte>(queryImagePath))
            {
                ImageFeature<float>[] modelFeatures = surfDetector.DetectFeatures(modelImage, null);

                if (modelFeatures.Length < 4) throw new InvalidOperationException("Model image didn't have any significant features to detect");

                Features2DTracker<float> tracker = new Features2DTracker<float>(modelFeatures);
                foreach (var surfRecord in observerFeatureSets)
                {
                    string queryImageName = System.IO.Path.GetFileName(queryImagePath);
                    string modelImageName = surfRecord.ImageName;

                    Features2DTracker<float>.MatchedImageFeature[] matchedFeatures = tracker.MatchFeature(surfRecord.observerFeatures, 2);

                    Features2DTracker<float>.MatchedImageFeature[] uniqueFeatures = Features2DTracker<float>.VoteForUniqueness(matchedFeatures, uniquenessThreshold);

                    Features2DTracker<float>.MatchedImageFeature[] uniqueRotOriFeatures = Features2DTracker<float>.VoteForSizeAndOrientation(uniqueFeatures, 1.5, 20);

                    int goodMatchCount = 0;
                    goodMatchCount = uniqueRotOriFeatures.Length;
                    bool isMatch = false;

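                    //note: the three statements below simplify to
                    //matchPercentage = Math.Round((double)goodMatchCount / totalnumberOfModelFeature * 100)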
                    double totalnumberOfModelFeature = modelFeatures.Length;
                    double matchPercentage = ((totalnumberOfModelFeature - (double)goodMatchCount) / totalnumberOfModelFeature);
                    matchPercentage = (1 - matchPercentage) * 100;
                    matchPercentage = Math.Round(matchPercentage);
                    if (matchPercentage >= minGoodMatchPercent)
                    {

                        HomographyMatrix homography =
                            Features2DTracker<float>.GetHomographyMatrixFromMatchedFeatures(uniqueRotOriFeatures);
                        if (homography != null)
                        {
                            isMatch = homography.IsValid(5);
                            if (isMatch)
                            {
                                surfRecord.Distance = matchPercentage;
                                rtnImageList.Add((ImageRecord)surfRecord);
                            }
                        }
                    }

                    //bool isMatch = false;
                    //if (uniqueFeatures.Length > 4)
                    //{
                    //    HomographyMatrix homography =
                    //        Features2DTracker<float>.GetHomographyMatrixFromMatchedFeatures(uniqueRotOriFeatures);
                    //    if (homography != null)
                    //    {
                    //        isMatch = homography.IsValid(5);
                    //    }
                    //}

                    //if (isMatch)
                    //{
                    //    surfRecord.Distance = goodMatchCount;
                    //    rtnImageList.Add((ImageRecord)surfRecord);
                    //}

                    //int goodMatchCount = 0;
                    //foreach (Features2DTracker<float>.MatchedImageFeature ms in matchedFeatures)
                    //{
                    //    if (ms.SimilarFeatures[0].Distance < uniquenessThreshold)
                    //        goodMatchCount++;
                    //}

                    //double totalnumberOfModelFeature = modelFeatures.Length;
                    //double matchPercentage = ((totalnumberOfModelFeature - (double)goodMatchCount) / totalnumberOfModelFeature);
                    //matchPercentage = (1 - matchPercentage) * 100;
                    //matchPercentage = Math.Round(matchPercentage);
                    //if (matchPercentage >= minGoodMatchPercent)
                    //{
                    //    surfRecord.Distance = matchPercentage;
                    //    rtnImageList.Add((ImageRecord)surfRecord);
                    //}
                }
            }
            rtnImageList = rtnImageList.OrderByDescending(x => x.Distance).ToList();
            return rtnImageList;
        }
Example #15
        public void IndexFiles(FileInfo[] imageFiles, System.ComponentModel.BackgroundWorker IndexBgWorker,
            Action<string> logWriter,
            SurfSettings surfSetting = null)
        {
            #region SURF Detector Setup
            double hessianThresh = 500;
            double uniquenessThreshold = 0.8;

            if (surfSetting != null)
            {
                hessianThresh = surfSetting.HessianThresh.Value;
                uniquenessThreshold = surfSetting.UniquenessThreshold.Value;
            }

            SURFDetector surfDetector = new SURFDetector(hessianThresh, false);
            #endregion

            int rows = 0;

            Matrix<float> superMatrix = null;
            List<SURFRecord1> observerSurfImageIndexList = new List<SURFRecord1>();

            Stopwatch sw1, sw2;

            sw1 = Stopwatch.StartNew();
            logWriter("Index started...");
            int totalFileCount = imageFiles.Length;
            for (int i = 0; i < totalFileCount; i++)
            {
                var fi = imageFiles[i];
                using (Image<Gray, byte> observerImage = new Image<Gray, byte>(fi.FullName))
                {
                    VectorOfKeyPoint observerKeyPoints = new VectorOfKeyPoint();
                    Matrix<float> observerDescriptor = surfDetector.DetectAndCompute(observerImage, null, observerKeyPoints);

                    if (observerDescriptor.Rows > 4)
                    {
                        int endRows = rows + observerDescriptor.Rows - 1;

                        SURFRecord1 record = new SURFRecord1
                        {
                            Id = i,
                            ImageName = fi.Name,
                            ImagePath = fi.FullName,
                            IndexStart = rows,
                            IndexEnd = endRows
                        };

                        observerSurfImageIndexList.Add(record);

                        if (superMatrix == null)
                            superMatrix = observerDescriptor;
                        else
                            superMatrix = superMatrix.ConcateVertical(observerDescriptor);

                        rows = endRows + 1;
                    }
                    else
                    {
                        Debug.WriteLine(fi.Name + " skipped from index because it didn't have significant features");
                    }
                }
                IndexBgWorker.ReportProgress(i);
            }
            sw1.Stop();
            logWriter(string.Format("Index Complete, it tooked {0} ms. Saving Repository...", sw1.ElapsedMilliseconds));
            SurfDataSet surfDataset = new SurfDataSet
            {
                SurfImageIndexRecord = observerSurfImageIndexList,
                SuperMatrix = superMatrix
            };
            sw2 = Stopwatch.StartNew();
            SurfRepository.AddSuperMatrixList(surfDataset);
            SurfRepository.SaveRepository(SurfAlgo.Flaan);
            sw2.Stop();

            logWriter(string.Format("Index tooked {0} ms. Saving Repository tooked {1} ms", sw1.ElapsedMilliseconds, sw2.ElapsedMilliseconds));
        }
Example #16
        public Image<Gray, Byte> Run(int hessianThresh, bool extended, 
            int neighbours, int emax, 
            double uniquenessThreshold,
            double scaleIncrement, int rotBins, 
            bool show)
        {
            SURFDetector detector = new SURFDetector(hessianThresh, extended); //hessianThresh=500, extended=false

            Image<Bgr, Byte> modelImageBgr = new Image<Bgr, byte>(@"images\640x480\3_purple_oval_full_cropped.bmp");//.Convert<Gray, Byte>();
            Image<Gray, Byte> modelImage = modelImageBgr.Convert<Gray, Byte>();
            //extract features from the object image
            SURFFeature[] modelFeatures = modelImage.ExtractSURF(ref detector);

            Image<Gray, Byte> observedImage = new Image<Gray, byte>(@"images\640x480\scene1.png");
            // extract features from the observed image
            SURFFeature[] imageFeatures = observedImage.ExtractSURF(ref detector);

            //Create a SURF Tracker using k-d Tree
            SURFTracker tracker = new SURFTracker(modelFeatures);
            //Comment out above and uncomment below if you wish to use spill-tree instead
            //SURFTracker tracker = new SURFTracker(modelFeatures, 50, .7, .1);

            SURFTracker.MatchedSURFFeature[] matchedFeatures = tracker.MatchFeature(imageFeatures, neighbours, emax); //neighbours=2, emax=20
            matchedFeatures = SURFTracker.VoteForUniqueness(matchedFeatures, uniquenessThreshold);//uniquenessThreshold=0.8
            matchedFeatures = SURFTracker.VoteForSizeAndOrientation(matchedFeatures, scaleIncrement, rotBins); //scaleIncrement=1.5, rotBins=20
            HomographyMatrix homography = SURFTracker.GetHomographyMatrixFromMatchedFeatures(matchedFeatures);

            //Merge the object image and the observed image into one image for display
            Image<Gray, Byte> res = modelImage.ConcateHorizontal(observedImage);

            #region draw lines between the matched features
            foreach (SURFTracker.MatchedSURFFeature matchedFeature in matchedFeatures)
            {
                PointF p = matchedFeature.ObservedFeature.Point.pt;
                p.X += modelImage.Width;
                res.Draw(new LineSegment2DF(matchedFeature.SimilarFeatures[0].Feature.Point.pt, p), new Gray(0), 1);
            }
            #endregion

            #region draw the project region on the image
            if (homography != null)
            {  //draw a rectangle along the projected model
                Rectangle rect = modelImage.ROI;
                PointF[] pts = new PointF[] {
               new PointF(rect.Left, rect.Bottom),
               new PointF(rect.Right, rect.Bottom),
               new PointF(rect.Right, rect.Top),
               new PointF(rect.Left, rect.Top)};
                homography.ProjectPoints(pts);

                for (int i = 0; i < pts.Length; i++)
                    pts[i].X += modelImage.Width; //the images are concatenated horizontally here, so shift X by the model width

                res.DrawPolyline(Array.ConvertAll<PointF, Point>(pts, Point.Round), true, new Gray(255.0), 5);
            }
            #endregion

            if (show)
            {
                ImageViewer.Show(res);
            }

            return res;
        }
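
A hedged invocation sketch using the defaults noted in the inline comments above (the instance name engine is hypothetical):

        Image<Gray, Byte> output = engine.Run(500, false, 2, 20, 0.8, 1.5, 20, true);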
Example #17
        private void PerformSurfDetection(object sender, EventArgs e)
        {
            this.Text = "working...";
            Application.DoEvents();
            stopwatch.Restart();

            HomographyMatrix homographyMatrix = null;
            SURFDetector surfDetector = new SURFDetector(500, false);
            Image<Gray, Byte> imgMasterGray;
            Image<Gray, Byte> imgToFindGray;
            VectorOfKeyPoint vkpMasterKeyPoints;
            VectorOfKeyPoint vkpToFindKeyPoints;
            Matrix<float> mtxMasterDescriptors;
            Matrix<float> mtxToFindDescriptors;
            Matrix<int> mtxMatchIndices;
            Matrix<float> mtxDistance;
            Matrix<Byte> mtxMask;
            BruteForceMatcher<float> bruteForceMatcher;

            int neighbors = 2;
            double ratioUnique = 0.5;
            int nonZeroElements;
            double scaleIncrement = 1.5;
            int rotationBin = 20;
            double maxReprojectionError = 2.0;

            //PointF[] ptfPointsF;
            //Point ptPoints;

            imgMasterGray = new Image<Gray, byte>(imgMasterColor.ToBitmap());
            imgToFindGray = new Image<Gray, byte>(imgToFindColor.ToBitmap());

            vkpMasterKeyPoints = surfDetector.DetectKeyPointsRaw(imgMasterGray, null);
            mtxMasterDescriptors = surfDetector.ComputeDescriptorsRaw(imgMasterGray, null, vkpMasterKeyPoints);

            vkpToFindKeyPoints = surfDetector.DetectKeyPointsRaw(imgToFindGray, null);
            mtxToFindDescriptors = surfDetector.ComputeDescriptorsRaw(imgToFindGray, null, vkpToFindKeyPoints);

            bruteForceMatcher = new BruteForceMatcher<float>(DistanceType.L2);
            bruteForceMatcher.Add(mtxToFindDescriptors);

            mtxMatchIndices = new Matrix<int>(mtxMasterDescriptors.Rows, neighbors);
            mtxDistance = new Matrix<float>(mtxMasterDescriptors.Rows, neighbors);

            bruteForceMatcher.KnnMatch(mtxMasterDescriptors, mtxMatchIndices, mtxDistance, neighbors, null);

            mtxMask = new Matrix<byte>(mtxDistance.Rows, 1);
            mtxMask.SetValue(255);

            Features2DToolbox.VoteForUniqueness(mtxDistance, ratioUnique, mtxMask);

            nonZeroElements = CvInvoke.cvCountNonZero(mtxMask);
            if (nonZeroElements >= 4)
            {
                nonZeroElements = Features2DToolbox.VoteForSizeAndOrientation(vkpToFindKeyPoints, vkpMasterKeyPoints, mtxMatchIndices, mtxMask, scaleIncrement, rotationBin);
                if (nonZeroElements >= 4)
                {
                    homographyMatrix = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(vkpToFindKeyPoints, vkpMasterKeyPoints, mtxMatchIndices, mtxMask, maxReprojectionError);
                }
            }

            imgCopyToFind = imgToFindColor.Copy();
            imgCopyToFind.Draw(new Rectangle(1, 1, imgCopyToFind.Width - 3, imgCopyToFind.Height - 3), bgrFoundImageColor, 2);

            imgResult = imgMasterColor;
            imgResult = imgResult.ConcateHorizontal(imgCopyToFind);

            if (homographyMatrix != null)
            {
                // draw a rectangle along the projected model
                Rectangle rect = imgCopyToFind.ROI;
                PointF[] pts = new PointF[] {
                    new PointF(rect.Left, rect.Bottom),
                    new PointF(rect.Right, rect.Bottom),
                    new PointF(rect.Right, rect.Top),
                    new PointF(rect.Left, rect.Top)
                };

                homographyMatrix.ProjectPoints(pts);

                Point[] ptPoints = { Point.Round(pts[0]), Point.Round(pts[1]), Point.Round(pts[2]), Point.Round(pts[3]) };

                imgResult.DrawPolyline(ptPoints, true, bgrFoundImageColor, 2);

                int X = Convert.ToInt16((pts[0].X + pts[1].X) / 2) + this.Left;
                int Y = Convert.ToInt16((pts[1].Y + pts[2].Y) / 2) + this.Top + 30;

                LeftClick(X, Y);
            }

            stopwatch.Stop();
            //this.Text = "working time = " + stopwatch.Elapsed.TotalSeconds.ToString() + "sec, done ! ";
        }
Example #18
        /// <summary>
        /// Draw the model image and observed image, the matched features and homography projection.
        /// </summary>
        /// <param name="modelImage">The model image</param>
        /// <param name="observedImage">The observed image</param>
        /// <param name="matchTime">The output total time for computing the homography matrix.</param>
        /// <returns>The model image and observed image, the matched features and homography projection.</returns>
        public static Image<Bgr, Byte> Draw(Image<Gray, Byte> modelImage, Image<Gray, byte> observedImage, out long matchTime)
        {
            Stopwatch watch;
            HomographyMatrix homography = null;

            SURFDetector surfCPU = new SURFDetector (500, false);
            VectorOfKeyPoint modelKeyPoints;
            VectorOfKeyPoint observedKeyPoints;
            Matrix<int> indices;

            Matrix<byte> mask;
            int k = 2;
            double uniquenessThreshold = 0.8;
            if (GpuInvoke.HasCuda) {
                GpuSURFDetector surfGPU = new GpuSURFDetector (surfCPU.SURFParams, 0.01f);
                using (GpuImage<Gray, Byte> gpuModelImage = new GpuImage<Gray, byte> (modelImage))
                    //extract features from the object image
                using (GpuMat<float> gpuModelKeyPoints = surfGPU.DetectKeyPointsRaw (gpuModelImage, null))
                using (GpuMat<float> gpuModelDescriptors = surfGPU.ComputeDescriptorsRaw (gpuModelImage, null, gpuModelKeyPoints))
                using (GpuBruteForceMatcher<float> matcher = new GpuBruteForceMatcher<float> (DistanceType.L2)) {
                    modelKeyPoints = new VectorOfKeyPoint ();
                    surfGPU.DownloadKeypoints (gpuModelKeyPoints, modelKeyPoints);
                    watch = Stopwatch.StartNew ();

                    // extract features from the observed image
                    using (GpuImage<Gray, Byte> gpuObservedImage = new GpuImage<Gray, byte> (observedImage))
                    using (GpuMat<float> gpuObservedKeyPoints = surfGPU.DetectKeyPointsRaw (gpuObservedImage, null))
                    using (GpuMat<float> gpuObservedDescriptors = surfGPU.ComputeDescriptorsRaw (gpuObservedImage, null, gpuObservedKeyPoints))
                    using (GpuMat<int> gpuMatchIndices = new GpuMat<int> (gpuObservedDescriptors.Size.Height, k, 1, true))
                    using (GpuMat<float> gpuMatchDist = new GpuMat<float> (gpuObservedDescriptors.Size.Height, k, 1, true))
                    using (GpuMat<Byte> gpuMask = new GpuMat<byte> (gpuMatchIndices.Size.Height, 1, 1))
                    using (Stream stream = new Stream ()) {
                        matcher.KnnMatchSingle (gpuObservedDescriptors, gpuModelDescriptors, gpuMatchIndices, gpuMatchDist, k, null, stream);
                        indices = new Matrix<int> (gpuMatchIndices.Size);
                        mask = new Matrix<byte> (gpuMask.Size);

                        //GPU implementation of VoteForUniqueness
                        using (GpuMat<float> col0 = gpuMatchDist.Col (0))
                        using (GpuMat<float> col1 = gpuMatchDist.Col (1)) {
                            GpuInvoke.Multiply (col1, new MCvScalar (uniquenessThreshold), col1, stream);
                            GpuInvoke.Compare (col0, col1, gpuMask, CMP_TYPE.CV_CMP_LE, stream);
                        }

                        observedKeyPoints = new VectorOfKeyPoint ();
                        surfGPU.DownloadKeypoints (gpuObservedKeyPoints, observedKeyPoints);

                        //wait for the stream to complete its tasks
                        //We can perform some other CPU-intensive work here while waiting for the stream to complete.
                        stream.WaitForCompletion ();

                        gpuMask.Download (mask);
                        gpuMatchIndices.Download (indices);

                        if (GpuInvoke.CountNonZero (gpuMask) >= 4) {
                            int nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation (modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                            if (nonZeroCount >= 4)
                                homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures (modelKeyPoints, observedKeyPoints, indices, mask, 2);
                        }

                        watch.Stop ();
                    }
                }
            } else {
                //extract features from the object image
                modelKeyPoints = surfCPU.DetectKeyPointsRaw (modelImage, null);
                Matrix<float> modelDescriptors = surfCPU.ComputeDescriptorsRaw (modelImage, null, modelKeyPoints);

                watch = Stopwatch.StartNew ();

                // extract features from the observed image
                observedKeyPoints = surfCPU.DetectKeyPointsRaw (observedImage, null);
                Matrix<float> observedDescriptors = surfCPU.ComputeDescriptorsRaw (observedImage, null, observedKeyPoints);
                BruteForceMatcher<float> matcher = new BruteForceMatcher<float> (DistanceType.L2);
                matcher.Add (modelDescriptors);

                indices = new Matrix<int> (observedDescriptors.Rows, k);
                using (Matrix<float> dist = new Matrix<float> (observedDescriptors.Rows, k)) {
                    matcher.KnnMatch (observedDescriptors, indices, dist, k, null);
                    mask = new Matrix<byte> (dist.Rows, 1);
                    mask.SetValue (255);
                    Features2DToolbox.VoteForUniqueness (dist, uniquenessThreshold, mask);
                }

                int nonZeroCount = CvInvoke.cvCountNonZero (mask);
                if (nonZeroCount >= 4) {
                    nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation (modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                    if (nonZeroCount >= 4)
                        homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures (modelKeyPoints, observedKeyPoints, indices, mask, 2);
                }

                watch.Stop ();
            }

            //Draw the matched keypoints
            Image<Bgr, Byte> result = Features2DToolbox.DrawMatches (modelImage, modelKeyPoints, observedImage, observedKeyPoints,
                                          indices, new Bgr (255, 255, 255), new Bgr (255, 255, 255), mask, Features2DToolbox.KeypointDrawType.DEFAULT);

            #region draw the projected region on the image
            if (homography != null) {  //draw a rectangle along the projected model
                Rectangle rect = modelImage.ROI;
                PointF[] pts = new PointF[] {
                    new PointF (rect.Left, rect.Bottom),
                    new PointF (rect.Right, rect.Bottom),
                    new PointF (rect.Right, rect.Top),
                    new PointF (rect.Left, rect.Top)
                };
                homography.ProjectPoints (pts);

                result.DrawPolyline (Array.ConvertAll<PointF, Point> (pts, Point.Round), true, new Bgr (Color.Red), 5);
            }
            #endregion

            matchTime = watch.ElapsedMilliseconds;

            return result;
        }
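
A hedged usage sketch for the Draw helper above (the containing class name SurfMatcher is hypothetical; the file names are borrowed from Example #9):

        long matchTime;
        using (Image<Gray, Byte> model = new Image<Gray, byte>("box.png"))
        using (Image<Gray, Byte> scene = new Image<Gray, byte>("box_in_scene.png"))
        {
            Image<Bgr, Byte> result = SurfMatcher.Draw(model, scene, out matchTime);
            ImageViewer.Show(result, String.Format("Matched in {0} milliseconds", matchTime));
        }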
Example #19
        public Image<Bgr, byte> DrawResult(Image<Gray, byte> modelImage, Image<Gray, byte> observedImage,out double area, int minarea, out Point center)
        {
            //double estimated_dist =99999;
            center = new Point(400, 224);
            area = 0;
            //modelImage.Save("D:\\temp\\modelimage.jpg");
            //observedImage.Save("D:\\temp\\observedimage.jpg");

            //homography matrix
            HomographyMatrix homography = null;

            //SURF detector
            var surfCpu = new SURFDetector(500, false);

            //key points from the model image and the observed image

            Matrix<byte> mask;

            //k for k-nearest-neighbor matching
            var k = 2;
            //uniqueness filtering threshold
            var uniquenessThreshold = 0.8;

            //extract SURF key points and descriptors from the model image
            var modelKeyPoints = surfCpu.DetectKeyPointsRaw(modelImage, null);
            var modelDescriptors = surfCpu.ComputeDescriptorsRaw(modelImage, null, modelKeyPoints);

            //extract SURF key points and descriptors from the observed image
            var observedKeyPoints = surfCpu.DetectKeyPointsRaw(observedImage, null);
            var observedDescriptors = surfCpu.ComputeDescriptorsRaw(observedImage, null, observedKeyPoints);
            if (observedDescriptors == null)
            {

                return null;
            }

            //match feature vectors using a brute-force matcher
            //var bfmatcher = new BruteForceMatcher<float>(DistanceType.L2);
            //bfmatcher.Add(modelDescriptors);
            var indices = new Matrix<int>(observedDescriptors.Rows, k);
            var flannMatcher = new Index(modelDescriptors, 4);
            //filter match pairs by their feature vectors
            using (var dist = new Matrix<float>(observedDescriptors.Rows, k))
            {
                //2-nearest-neighbor matching of feature vectors
                //bfmatcher.KnnMatch(observedDescriptors, indices, dist, k, null);
                flannMatcher.KnnSearch(observedDescriptors, indices, dist, k, 24);
                //successful matches are recorded in the mask
                mask = new Matrix<byte>(dist.Rows, 1);
                mask.SetValue(255);
                //filter out non-unique matches; the surviving key points stay set in the mask
                Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
            }

            var nonZeroCount = CvInvoke.cvCountNonZero(mask);
            if (nonZeroCount >= 10)
            {
                //filter out matches with inconsistent scale and rotation; survivors stay in the mask
                nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices,
                    mask, 1.5, 20);
                if (nonZeroCount >= 10)
                    //build the homography matrix from the remaining matches
                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints,
                        observedKeyPoints, indices, mask, 2);
            }


            //draw the matched key points
            //Image<Bgr, Byte> result = Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints,indices, new Bgr(0, 0, 255), new Bgr(0, 255, 0), mask, Features2DToolbox.KeypointDrawType.DEFAULT);
            // result.Save("D:\\temp\\matchedpoints.jpg");
            var result = Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints,
                indices, new Bgr(0, 0, 255), new Bgr(0, 255, 0), mask, Features2DToolbox.KeypointDrawType.DEFAULT);

            #region draw the projected region on the Image

            //draw the homography projection
            if (homography != null)
            {
                var rect = modelImage.ROI;
                /*PointF[] pts = new PointF[] {
               new PointF(rect.Left, rect.Bottom),
               new PointF(rect.Right, rect.Bottom),
               new PointF(rect.Right, rect.Top),
               new PointF(rect.Left, rect.Top)
                    };*/
                var pts = new[]
                {
                    new PointF(rect.Left + (rect.Right - rect.Left)/5, rect.Bottom - (rect.Bottom - rect.Top)/5),
                    new PointF(rect.Right - (rect.Right - rect.Left)/5, rect.Bottom - (rect.Bottom - rect.Top)/5),
                    new PointF(rect.Right - (rect.Right - rect.Left)/5, rect.Top + (rect.Bottom - rect.Top)/5),
                    new PointF(rect.Left + (rect.Right - rect.Left)/5, rect.Top + (rect.Bottom - rect.Top)/5)
                };
                //project the model's four (inset) corners through the homography and draw the quadrilateral
                homography.ProjectPoints(pts);
                area = Getarea(pts);
                double xsum = 0;
                double ysum = 0;
                foreach (var point in pts)
                {
                    xsum += point.X;
                    ysum += point.Y;
                }
                center = new Point(Convert.ToInt32(xsum / 4), Convert.ToInt32(ysum / 4));
                if (area > minarea)
                {
                    var temp = new Image<Bgr, byte>(result.Width, result.Height);
                    temp.DrawPolyline(Array.ConvertAll(pts, Point.Round), true, new Bgr(Color.Red), 5);
                    //estimated_dist = GetDist(pts);

                    var a = CountContours(temp.ToBitmap());
                    if (a == 2)
                    {
                        result.DrawPolyline(Array.ConvertAll(pts, Point.Round), true, new Bgr(Color.Red), 5);
                        //result.Save("D:\\temp\\" + estimated_dist.ToString() + ".jpg");
                    }
                    else
                    {
                        area = 0; //dst = estimated_dist;
                        return result;
                    }
                }
            }
            else area = 0;

            #endregion

            //dst = estimated_dist;
            return result;
        }
Example #20
 private extern static void CvSURFDetectorDetectFeature(
     ref SURFDetector detector,
     IntPtr image,
     IntPtr mask,
     IntPtr keypoints,
     IntPtr descriptors);
Example #21
 private extern static void CvSURFDetectorDetectKeyPoints(
     ref SURFDetector detector,
     IntPtr image,
     IntPtr mask,
     IntPtr keypoints);
Example #22
        private static bool IsModelInObserved( Image<Gray, byte> modelImage, Image<Gray, byte> observedImage, double similarityThreshold = 0.075 )
        {
            var surfCpu = new SURFDetector(500, false);

             Matrix<byte> mask;
             int k = 2;
             double uniquenessThreshold = 0.8;

             //extract features from the object image
             var modelKeyPoints = surfCpu.DetectKeyPointsRaw( modelImage, null );
             Matrix<float> modelDescriptors = surfCpu.ComputeDescriptorsRaw(modelImage, null, modelKeyPoints);

             // extract features from the observed image
             var observedKeyPoints = surfCpu.DetectKeyPointsRaw( observedImage, null );
             Matrix<float> observedDescriptors = surfCpu.ComputeDescriptorsRaw(observedImage, null, observedKeyPoints);
             BruteForceMatcher<float> matcher = new BruteForceMatcher<float>(DistanceType.L2);
             matcher.Add( modelDescriptors );

             var indices = new Matrix<int>( observedDescriptors.Rows, k );
             using ( var dist = new Matrix<float>( observedDescriptors.Rows, k ) )
             {
            matcher.KnnMatch( observedDescriptors, indices, dist, k, null );
            mask = new Matrix<byte>( dist.Rows, 1 );
            mask.SetValue( 255 );
            Features2DToolbox.VoteForUniqueness( dist, uniquenessThreshold, mask );
             }

             int keypointMatchCount = CvInvoke.cvCountNonZero( mask );
             if ( keypointMatchCount >= 4 )
             {
            keypointMatchCount = Features2DToolbox.VoteForSizeAndOrientation( modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20 );
            if ( keypointMatchCount >= 4 )
            {
               Features2DToolbox.GetHomographyMatrixFromMatchedFeatures( modelKeyPoints, observedKeyPoints, indices, mask, 2 );
            }
             }

             var similarity = (double)keypointMatchCount / observedKeyPoints.Size;
             return similarity > similarityThreshold;
        }
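
The similarity score above is the number of surviving keypoint matches divided by the total number of observed keypoints. A hedged usage sketch (file names hypothetical):

        using (var model = new Image<Gray, byte>("logo.png"))
        using (var scene = new Image<Gray, byte>("screenshot.png"))
        {
            bool found = IsModelInObserved(model, scene); //default similarityThreshold = 0.075
        }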
Example #23
 private extern static void CvSURFDetectorDetectFeature(
    ref SURFDetector detector,
    IntPtr image,
    IntPtr mask,
    IntPtr keypoints,
    IntPtr descriptors);
Example #24
 private extern static void CvSURFDetectorDetectKeyPoints(
    ref SURFDetector detector,
    IntPtr image,
    IntPtr mask,
    IntPtr keypoints);
Example #25
        public static void FindMatch(Image<Gray, Byte> modelImage, Image<Gray, byte> observedImage, out long matchTime, out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints, out Matrix<int> indices, out Matrix<byte> mask, out HomographyMatrix homography)
        {
            int k = 2;
              double uniquenessThreshold = 0.8;
              SURFDetector surfCPU = new SURFDetector(500, false);
              Stopwatch watch;
              homography = null;

              //extract features from the object image
              modelKeyPoints = new VectorOfKeyPoint();
              Matrix<float> modelDescriptors = surfCPU.DetectAndCompute(modelImage, null, modelKeyPoints);

              watch = Stopwatch.StartNew();

              // extract features from the observed image
              observedKeyPoints = new VectorOfKeyPoint();
              Matrix<float> observedDescriptors = surfCPU.DetectAndCompute(observedImage, null, observedKeyPoints);
              BruteForceMatcher<float> matcher = new BruteForceMatcher<float>(DistanceType.L2);
              matcher.Add(modelDescriptors);

              indices = new Matrix<int>(observedDescriptors.Rows, k);
              using (Matrix<float> dist = new Matrix<float>(observedDescriptors.Rows, k))
              {
              matcher.KnnMatch(observedDescriptors, indices, dist, k, null);
              mask = new Matrix<byte>(dist.Rows, 1);
              mask.SetValue(255);
              Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
              }

              int nonZeroCount = CvInvoke.cvCountNonZero(mask);
              if (nonZeroCount >= 4)
              {
              nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
              if (nonZeroCount >= 4)
                 homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
              }
              watch.Stop();

              matchTime = watch.ElapsedMilliseconds;
        }
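
The out parameters of FindMatch feed straight into Features2DToolbox.DrawMatches, as the surrounding examples do; a sketch, assuming the containing class is named Matcher (hypothetical) and modelImage/observedImage are existing Image<Gray, Byte> inputs:

        long matchTime;
        VectorOfKeyPoint modelKeyPoints, observedKeyPoints;
        Matrix<int> indices;
        Matrix<byte> mask;
        HomographyMatrix homography;
        Matcher.FindMatch(modelImage, observedImage, out matchTime, out modelKeyPoints, out observedKeyPoints, out indices, out mask, out homography);
        Image<Bgr, Byte> result = Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints,
            indices, new Bgr(255, 255, 255), new Bgr(255, 255, 255), mask, Features2DToolbox.KeypointDrawType.DEFAULT);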
Example #26
        public static bool FindModelImageInObservedImage( Image<Gray, byte> modelImage, Image<Gray, byte> observedImage )
        {
            var surfCpu = new SURFDetector(500, false);
             VectorOfKeyPoint modelKeyPoints;
             VectorOfKeyPoint observedKeyPoints;
             Matrix<int> indices;

             Matrix<byte> mask;
             int k = 2;
             double uniquenessThreshold = 0.8;
             if ( GpuInvoke.HasCuda )
             {
            GpuSURFDetector surfGpu = new GpuSURFDetector(surfCpu.SURFParams, 0.01f);
            using ( GpuImage<Gray, byte> gpuModelImage = new GpuImage<Gray, byte>( modelImage ) )
            //extract features from the object image
            using ( GpuMat<float> gpuModelKeyPoints = surfGpu.DetectKeyPointsRaw( gpuModelImage, null ) )
            using ( GpuMat<float> gpuModelDescriptors = surfGpu.ComputeDescriptorsRaw( gpuModelImage, null, gpuModelKeyPoints ) )
            using ( GpuBruteForceMatcher<float> matcher = new GpuBruteForceMatcher<float>( DistanceType.L2 ) )
            {
               modelKeyPoints = new VectorOfKeyPoint();
               surfGpu.DownloadKeypoints( gpuModelKeyPoints, modelKeyPoints );

               // extract features from the observed image
               using ( GpuImage<Gray, byte> gpuObservedImage = new GpuImage<Gray, byte>( observedImage ) )
               using ( GpuMat<float> gpuObservedKeyPoints = surfGpu.DetectKeyPointsRaw( gpuObservedImage, null ) )
               using ( GpuMat<float> gpuObservedDescriptors = surfGpu.ComputeDescriptorsRaw( gpuObservedImage, null, gpuObservedKeyPoints ) )
               using ( GpuMat<int> gpuMatchIndices = new GpuMat<int>( gpuObservedDescriptors.Size.Height, k, 1, true ) )
               using ( GpuMat<float> gpuMatchDist = new GpuMat<float>( gpuObservedDescriptors.Size.Height, k, 1, true ) )
               using ( GpuMat<Byte> gpuMask = new GpuMat<byte>( gpuMatchIndices.Size.Height, 1, 1 ) )
               using ( var stream = new Emgu.CV.GPU.Stream() )
               {
                  matcher.KnnMatchSingle( gpuObservedDescriptors, gpuModelDescriptors, gpuMatchIndices, gpuMatchDist, k, null, stream );
                  indices = new Matrix<int>( gpuMatchIndices.Size );
                  mask = new Matrix<byte>( gpuMask.Size );

                  //GPU implementation of VoteForUniqueness
                  using ( GpuMat<float> col0 = gpuMatchDist.Col( 0 ) )
                  using ( GpuMat<float> col1 = gpuMatchDist.Col( 1 ) )
                  {
                     GpuInvoke.Multiply( col1, new MCvScalar( uniquenessThreshold ), col1, stream );
                     GpuInvoke.Compare( col0, col1, gpuMask, CMP_TYPE.CV_CMP_LE, stream );
                  }

                  observedKeyPoints = new VectorOfKeyPoint();
                  surfGpu.DownloadKeypoints( gpuObservedKeyPoints, observedKeyPoints );

                  //wait for the stream to complete its tasks
                  //We can perform some other CPU-intensive work here while waiting for the stream to complete.
                  stream.WaitForCompletion();

                  gpuMask.Download( mask );
                  gpuMatchIndices.Download( indices );

                  if ( GpuInvoke.CountNonZero( gpuMask ) >= 4 )
                  {
                     int nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                     if ( nonZeroCount >= 4 )
                     {
                        Features2DToolbox.GetHomographyMatrixFromMatchedFeatures( modelKeyPoints, observedKeyPoints, indices, mask, 2 );
                     }
                     if ( (double)nonZeroCount / mask.Height > 0.02 )
                     {
                        return true;
                     }
                  }
               }
            }
             }
             else
             {
            //extract features from the object image
            modelKeyPoints = surfCpu.DetectKeyPointsRaw( modelImage, null );
            Matrix<float> modelDescriptors = surfCpu.ComputeDescriptorsRaw(modelImage, null, modelKeyPoints);

            // extract features from the observed image
            observedKeyPoints = surfCpu.DetectKeyPointsRaw( observedImage, null );
            Matrix<float> observedDescriptors = surfCpu.ComputeDescriptorsRaw(observedImage, null, observedKeyPoints);
            BruteForceMatcher<float> matcher = new BruteForceMatcher<float>(DistanceType.L2);
            matcher.Add( modelDescriptors );

            indices = new Matrix<int>( observedDescriptors.Rows, k );
            using ( Matrix<float> dist = new Matrix<float>( observedDescriptors.Rows, k ) )
            {
               matcher.KnnMatch( observedDescriptors, indices, dist, k, null );
               mask = new Matrix<byte>( dist.Rows, 1 );
               mask.SetValue( 255 );
               Features2DToolbox.VoteForUniqueness( dist, uniquenessThreshold, mask );
            }

            int nonZeroCount = CvInvoke.cvCountNonZero(mask);
            if ( nonZeroCount >= 4 )
            {
               nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation( modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20 );
               if ( nonZeroCount >= 4 )
               {
                  Features2DToolbox.GetHomographyMatrixFromMatchedFeatures( modelKeyPoints, observedKeyPoints, indices, mask, 2 );
               }
            }

            if ( (double)nonZeroCount/mask.Height > 0.02 )
            {
               return true;
            }
             }

             //Draw the matched keypoints
             //var result = Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints, indices, new Bgr(0, 0, 255), new Bgr(255, 0, 0), mask, Features2DToolbox.KeypointDrawType.DEFAULT);
             //result.Save( @"C:\Users\D.Markachev\Desktop\bleh-keypoints.jpg" );

             return false;
        }
Example #27
        /// <summary>
        /// Draw the model image and observed image, the matched features and homography projection.
        /// </summary>
        /// <param name="modelImageFileName">The model image</param>
        /// <param name="observedImageBitmap">The observed image</param>
        /// <param name="matchTime">The output total time for computing the homography matrix.</param>
        /// <returns>The four corner points of the projected model region in the observed image (null if no homography was found; four (-1,-1) points on error).</returns>
        private System.Drawing.Point[] DrawBruteForceMatch(String modelImageFileName, Bitmap observedImageBitmap, out long matchTime)
            {
                try
                {
                    Image<Gray, Byte> modelImage = new Image<Gray, byte>(modelImageFileName);
                    Image<Gray, Byte> observedImage = new Image<Gray, byte>(observedImageBitmap);
                    HomographyMatrix homography = null;
                    Stopwatch watch;
                    SURFDetector surfCPU = new SURFDetector(500, false);
                    VectorOfKeyPoint modelKeyPoints;
                    VectorOfKeyPoint observedKeyPoints;
                    Matrix<int> indices;

                    Matrix<byte> mask;
                    int k = 2;
                    double uniquenessThreshold = 0.8;

                    //extract features from the object image
                    modelKeyPoints = surfCPU.DetectKeyPointsRaw(modelImage, null);
                    Matrix<float> modelDescriptors = surfCPU.ComputeDescriptorsRaw(modelImage, null, modelKeyPoints);

                    watch = Stopwatch.StartNew();

                    // extract features from the observed image
                    observedKeyPoints = surfCPU.DetectKeyPointsRaw(observedImage, null);
                    Matrix<float> observedDescriptors = surfCPU.ComputeDescriptorsRaw(observedImage, null, observedKeyPoints);
                    BruteForceMatcher<float> matcher = new BruteForceMatcher<float>(DistanceType.L2);
                    matcher.Add(modelDescriptors);

                    indices = new Matrix<int>(observedDescriptors.Rows, k);
                    Matrix<float> dist = new Matrix<float>(observedDescriptors.Rows, k);
                    matcher.KnnMatch(observedDescriptors, indices, dist, k, null);
                    mask = new Matrix<byte>(dist.Rows, 1);
                    mask.SetValue(255);
                    Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);

                    int nonZeroCount = CvInvoke.cvCountNonZero(mask);
                    if (nonZeroCount >= 4)
                    {
                        nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                        if (nonZeroCount >= 4)
                            homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
                    }
                    watch.Stop();

                    //Draw the matched keypoints
                    Image<Bgr, Byte> result = Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints,
                                                            indices, new Bgr(255, 255, 255), new Bgr(255, 255, 255), mask, Features2DToolbox.KeypointDrawType.DEFAULT);

                    System.Drawing.Point[] newpts = null;
                    #region draw the projected region on the image
                    if (homography != null)
                    {
                        //draw a rectangle along the projected model
                        Rectangle rect = modelImage.ROI;
                        PointF[] pts = new PointF[] { 
                                                               new PointF(rect.Left, rect.Bottom),
                                                               new PointF(rect.Right, rect.Bottom),
                                                               new PointF(rect.Right, rect.Top),
                                                               new PointF(rect.Left, rect.Top)};
                        homography.ProjectPoints(pts);
                        //result.DrawPolyline(Array.ConvertAll<PointF, System.Drawing.Point>(pts, System.Drawing.Point.Round), true, new Bgr(Color.Red), 2);
                        //result.Save(@"E:\1.jpg");
                        newpts = Array.ConvertAll<PointF, System.Drawing.Point>(pts, System.Drawing.Point.Round);

                    }
                    #endregion
                    matchTime = watch.ElapsedMilliseconds;
                    return newpts;
                }
                catch (Exception e)
                {
                    Console.WriteLine(e.Message);
                    matchTime = 0;
                    return new System.Drawing.Point[] { new System.Drawing.Point(-1, -1), new System.Drawing.Point(-1, -1), new System.Drawing.Point(-1, -1), new System.Drawing.Point(-1, -1) };
                }
        }
Example #28
        public static void FindMatch(Image<Gray, Byte> modelImage, Image<Gray, byte> observedImage, out long matchTime, out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints, out Matrix<int> indices, out Matrix<byte> mask, out HomographyMatrix homography)
        {
            int k = 2;
            double uniquenessThreshold = 0.8;
            SURFDetector surfCPU = new SURFDetector(500, false);
            Stopwatch watch;
            homography = null;
            #if !IOS
            if (GpuInvoke.HasCuda)
            {
                GpuSURFDetector surfGPU = new GpuSURFDetector(surfCPU.SURFParams, 0.01f);
                // extract features from the object image
                using (GpuImage<Gray, Byte> gpuModelImage = new GpuImage<Gray, byte>(modelImage))
                using (GpuMat<float> gpuModelKeyPoints = surfGPU.DetectKeyPointsRaw(gpuModelImage, null))
                using (GpuMat<float> gpuModelDescriptors = surfGPU.ComputeDescriptorsRaw(gpuModelImage, null, gpuModelKeyPoints))
                using (GpuBruteForceMatcher<float> matcher = new GpuBruteForceMatcher<float>(DistanceType.L2))
                {
                    modelKeyPoints = new VectorOfKeyPoint();
                    surfGPU.DownloadKeypoints(gpuModelKeyPoints, modelKeyPoints);
                    watch = Stopwatch.StartNew();

                    // extract features from the observed image
                    using (GpuImage<Gray, Byte> gpuObservedImage = new GpuImage<Gray, byte>(observedImage))
                    using (GpuMat<float> gpuObservedKeyPoints = surfGPU.DetectKeyPointsRaw(gpuObservedImage, null))
                    using (GpuMat<float> gpuObservedDescriptors = surfGPU.ComputeDescriptorsRaw(gpuObservedImage, null, gpuObservedKeyPoints))
                    using (GpuMat<int> gpuMatchIndices = new GpuMat<int>(gpuObservedDescriptors.Size.Height, k, 1, true))
                    using (GpuMat<float> gpuMatchDist = new GpuMat<float>(gpuObservedDescriptors.Size.Height, k, 1, true))
                    using (GpuMat<Byte> gpuMask = new GpuMat<byte>(gpuMatchIndices.Size.Height, 1, 1))
                    using (Stream stream = new Stream())
                    {
                        matcher.KnnMatchSingle(gpuObservedDescriptors, gpuModelDescriptors, gpuMatchIndices, gpuMatchDist, k, null, stream);
                        indices = new Matrix<int>(gpuMatchIndices.Size);
                        mask = new Matrix<byte>(gpuMask.Size);

                        // GPU implementation of VoteForUniqueness
                        using (GpuMat<float> col0 = gpuMatchDist.Col(0))
                        using (GpuMat<float> col1 = gpuMatchDist.Col(1))
                        {
                            GpuInvoke.Multiply(col1, new MCvScalar(uniquenessThreshold), col1, stream);
                            GpuInvoke.Compare(col0, col1, gpuMask, CMP_TYPE.CV_CMP_LE, stream);
                        }

                        observedKeyPoints = new VectorOfKeyPoint();
                        surfGPU.DownloadKeypoints(gpuObservedKeyPoints, observedKeyPoints);

                        // wait for the stream to complete its tasks; other CPU-intensive
                        // work could be done here while the stream is still running
                        stream.WaitForCompletion();

                        gpuMask.Download(mask);
                        gpuMatchIndices.Download(indices);

                        if (GpuInvoke.CountNonZero(gpuMask) >= 4)
                        {
                            int nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                            if (nonZeroCount >= 4)
                                homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
                        }

                        watch.Stop();
                    }
                }
            }
            else
            #endif
            {
                // extract features from the object image
                modelKeyPoints = new VectorOfKeyPoint();
                Matrix<float> modelDescriptors = surfCPU.DetectAndCompute(modelImage, null, modelKeyPoints);

                watch = Stopwatch.StartNew();

                // extract features from the observed image
                observedKeyPoints = new VectorOfKeyPoint();
                Matrix<float> observedDescriptors = surfCPU.DetectAndCompute(observedImage, null, observedKeyPoints);
                BruteForceMatcher<float> matcher = new BruteForceMatcher<float>(DistanceType.L2);
                matcher.Add(modelDescriptors);

                indices = new Matrix<int>(observedDescriptors.Rows, k);
                using (Matrix<float> dist = new Matrix<float>(observedDescriptors.Rows, k))
                {
                    matcher.KnnMatch(observedDescriptors, indices, dist, k, null);
                    mask = new Matrix<byte>(dist.Rows, 1);
                    mask.SetValue(255);
                    Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
                }

                int nonZeroCount = CvInvoke.cvCountNonZero(mask);
                if (nonZeroCount >= 4)
                {
                    nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                    if (nonZeroCount >= 4)
                        homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
                }

                watch.Stop();
            }
            matchTime = watch.ElapsedMilliseconds;
        }
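
A minimal caller sketch for the FindMatch overload above; the image paths are placeholders, and homography comes back null when fewer than four consistent matches survive the two voting passes:

        long matchTime;
        VectorOfKeyPoint modelKeyPoints, observedKeyPoints;
        Matrix<int> indices;
        Matrix<byte> mask;
        HomographyMatrix homography;

        using (Image<Gray, Byte> model = new Image<Gray, Byte>("model.jpg"))   // hypothetical path
        using (Image<Gray, Byte> scene = new Image<Gray, Byte>("scene.jpg"))   // hypothetical path
        {
            FindMatch(model, scene, out matchTime, out modelKeyPoints,
                out observedKeyPoints, out indices, out mask, out homography);

            if (homography != null)
                Console.WriteLine("Match found in {0} ms", matchTime);
        }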
Exemplo n.º 29
0
      public Image<Bgr, float> alignment(Image<Bgr, float> fImage, Image<Bgr, float> lImage, Boolean qrCode)
      {
          HomographyMatrix homography = null;
          SURFDetector surfCPU = new SURFDetector(500, false);
          VectorOfKeyPoint modelKeyPoints;
          VectorOfKeyPoint observedKeyPoints;
          Matrix<int> indices;
          Matrix<byte> mask;

          int k = 2;
          double uniquenessThreshold = 0.8;

          Image<Gray, Byte> fImageG = fImage.Convert<Gray, Byte>();
          Image<Gray, Byte> lImageG = lImage.Convert<Gray, Byte>();

          // extract features from the object image
          modelKeyPoints = new VectorOfKeyPoint();
          Matrix<float> modelDescriptors = surfCPU.DetectAndCompute(fImageG, null, modelKeyPoints);

          // extract features from the observed image
          observedKeyPoints = new VectorOfKeyPoint();
          Matrix<float> observedDescriptors = surfCPU.DetectAndCompute(lImageG, null, observedKeyPoints);
          BruteForceMatcher<float> matcher = new BruteForceMatcher<float>(DistanceType.L2);
          matcher.Add(modelDescriptors);

          indices = new Matrix<int>(observedDescriptors.Rows, k);
          using (Matrix<float> dist = new Matrix<float>(observedDescriptors.Rows, k))
          {
              matcher.KnnMatch(observedDescriptors, indices, dist, k, null);
              mask = new Matrix<byte>(dist.Rows, 1);
              mask.SetValue(255);
              Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
          }

          int nonZeroCount = CvInvoke.cvCountNonZero(mask);
          if (nonZeroCount >= 4)
          {
              nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
              if (nonZeroCount >= 4)
                  homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
          }

          // guard against a null homography before reading its Sum
          if (!qrCode && homography != null && homography.Sum > 0)
          {
              throw new Exception("Unexpected homography for a non-QR alignment");
          }

          Image<Bgr, Byte> result = Features2DToolbox.DrawMatches(fImageG, modelKeyPoints, lImageG, observedKeyPoints,
              indices, new Bgr(255, 255, 255), new Bgr(255, 255, 255), mask, Features2DToolbox.KeypointDrawType.DEFAULT);
          if (homography != null)
          {
              Console.WriteLine("homography is not null");
              // draw a rectangle along the projected model
              Rectangle rect = fImageG.ROI;
              PointF[] pts = new PointF[] {
                  new PointF(rect.Left, rect.Bottom),
                  new PointF(rect.Right, rect.Bottom),
                  new PointF(rect.Right, rect.Top),
                  new PointF(rect.Left, rect.Top)};
              homography.ProjectPoints(pts);

              result.DrawPolyline(Array.ConvertAll<PointF, Point>(pts, Point.Round), true, new Bgr(Color.Red), 5);
              result.Save("resultqr.jpg");

              // warp the first image into the frame of the second using the homography
              Image<Bgr, float> result2 = new Image<Bgr, float>(fImage.Size);
              CvInvoke.cvWarpPerspective(fImage.Ptr, result2, homography.Ptr, (int)INTER.CV_INTER_CUBIC + (int)WARP.CV_WARP_FILL_OUTLIERS, new MCvScalar(0));
              return result2;
          }
          else
          {
              Console.WriteLine("homography is null");
          }
          return null;
      }
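
A hedged usage sketch: alignment returns fImage warped into lImage's frame via the estimated homography, or null when none is found. The file paths and the qrCode flag value here are assumptions:

      Image<Bgr, float> first = new Image<Bgr, float>("frame1.jpg");   // hypothetical path
      Image<Bgr, float> second = new Image<Bgr, float>("frame2.jpg");  // hypothetical path

      Image<Bgr, float> warped = alignment(first, second, true);
      if (warped != null)
          warped.Save("aligned.jpg");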
Exemplo n.º 30
0
        private void extractData()
        {
            SURFDetector surfCPU = new SURFDetector(500, false);

            _objectKeyPoints = surfCPU.DetectKeyPointsRaw(objectImage, null);
            _objectDescriptors = surfCPU.ComputeDescriptorsRaw(objectImage, null, _objectKeyPoints);
        }
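
For context, a sketch of the surrounding members this method is assumed to touch (none of them are declared in the snippet), plus how the cached descriptors could seed a matcher later without re-detecting:

        // Assumed fields -- not part of the original snippet.
        private Image<Gray, Byte> objectImage;
        private VectorOfKeyPoint _objectKeyPoints;
        private Matrix<float> _objectDescriptors;

        private BruteForceMatcher<float> createMatcher()
        {
            // reuse the descriptors cached by extractData() instead of recomputing them
            BruteForceMatcher<float> matcher = new BruteForceMatcher<float>(DistanceType.L2);
            matcher.Add(_objectDescriptors);
            return matcher;
        }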
Exemplo n.º 31
0
        public List<ImageRecord> QueryImage(string queryImagePath, out string messageToLog, SurfSettings surfSetting = null)
        {
            List<ImageRecord> rtnImageList = new List<ImageRecord>();

            #region Diagnostic Region
            Stopwatch sw = new Stopwatch();
            long IndexingTime = 0; long QueryingTime = 0; long LoopTime = 0;
            #endregion Diagnostic Region

            SurfDataSet observerDataset = SurfRepository.GetSurfDataSet();
            if (observerDataset == null)
                throw new InvalidOperationException("Can't get the Surf Index, please index first");

            #region Surf Detector Region
            double hessianThresh = 500;
            double uniquenessThreshold = 0.8;
            double goodMatchThreshold = 0; // assumed default; the original read surfSetting.GoodMatchThreshold without a null check

            if (surfSetting != null)
            {
                hessianThresh = surfSetting.HessianThresh.Value;
                uniquenessThreshold = surfSetting.UniquenessThreshold.Value;
                goodMatchThreshold = surfSetting.GoodMatchThreshold.Value;
            }

            SURFDetector surfDetector = new SURFDetector(hessianThresh, false);
            #endregion Surf Detector Region

            Matrix<float> modelDescriptors;

            using (Image<Gray, byte> modelImage = new Image<Gray, byte>(queryImagePath))
            {
                VectorOfKeyPoint modelKeyPoints = new VectorOfKeyPoint();
                modelDescriptors = surfDetector.DetectAndCompute(modelImage, null, modelKeyPoints);
                if (modelDescriptors.Rows < 4) throw new InvalidOperationException("Model image doesn't have enough significant features to detect");
                Matrix<float> superMatrix = observerDataset.SuperMatrix;

                sw.Start();
                Emgu.CV.Flann.Index flannIndex;
                if (!SurfRepository.Exists("flannIndex"))
                {
                    flannIndex = new Emgu.CV.Flann.Index(superMatrix, 4);
                    SurfRepository.AddFlannIndex(flannIndex, "flannIndex");
                }
                else
                    flannIndex = SurfRepository.GetFlannIndex("flannIndex");

                sw.Stop(); IndexingTime = sw.ElapsedMilliseconds; sw.Reset();

                var indices = new Matrix<int>(modelDescriptors.Rows, 2); // matrix that will contain indices of the 2-nearest neighbors found
                var dists = new Matrix<float>(modelDescriptors.Rows, 2); // matrix that will contain distances to the 2-nearest neighbors found

                sw.Start();
                flannIndex.KnnSearch(modelDescriptors, indices, dists, 2, 24);
                sw.Stop(); QueryingTime = sw.ElapsedMilliseconds; sw.Reset();

                List<SURFRecord1> imageList = observerDataset.SurfImageIndexRecord;
                imageList.ForEach(x => x.Distance = 0);

                //Create Interval Tree for Images
                IntervalTreeHelper.CreateTree(imageList);

                sw.Start();
                for (int i = 0; i < indices.Rows; i++)
                {
                    // filter out all inadequate pairs based on distance between pairs
                    if (dists.Data[i, 0] < (uniquenessThreshold * dists.Data[i, 1]))
                    {
                        var img = IntervalTreeHelper.GetImageforRange(indices[i, 0]);
                        if (img != null) img.Distance++;
                    }
                }
                sw.Stop(); LoopTime = sw.ElapsedMilliseconds;

                string msg = String.Format("Indexing: {0}, Querying: {1}, Looping: {2}", IndexingTime, QueryingTime, LoopTime);
                messageToLog = msg;

                // use the guarded threshold so a null surfSetting can't throw here
                rtnImageList = imageList.Where(x => x.Distance > goodMatchThreshold).OrderByDescending(x => x.Distance).Select(x => (ImageRecord)x).ToList();
            }

            return rtnImageList;
        }
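
A possible call site, sketched under the assumption that SurfSettings exposes the three nullable thresholds read above as settable properties and that an index has already been built into SurfRepository:

        SurfSettings settings = new SurfSettings
        {
            HessianThresh = 500,        // SURF keypoint strength cutoff
            UniquenessThreshold = 0.8,  // ratio-test cutoff used when voting
            GoodMatchThreshold = 5      // minimum votes before an image is returned
        };

        string log;
        List<ImageRecord> results = QueryImage(@"C:\images\query.jpg", out log, settings); // hypothetical path
        Console.WriteLine(log); // "Indexing: ..., Querying: ..., Looping: ..."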
Exemplo n.º 32
0
        private List<LoCaTeRanker> PerformExtendedSurfSearch(string queryImagePath, List<LoCaTeRanker> ImageList)
        {
            //If no images are found in Locate, return
            if (ImageList.Count == 0)
                return ImageList;

            SURFDetector surfDetector = new SURFDetector(1000, false);
            Matrix<float> superMatrix = null;
            List<SURFRecord1> observerSurfImageIndexList = new List<SURFRecord1>();

            #region Computing Model Descriptors
            Matrix<float> modelDescriptors;
            VectorOfKeyPoint modelKeyPoints = new VectorOfKeyPoint();
            using (Image<Gray, byte> modelImage = new Image<Gray, byte>(queryImagePath))
            {
                modelDescriptors = surfDetector.DetectAndCompute(modelImage, null, modelKeyPoints);
            }
            #endregion

            #region Computing Surf Descriptors
            int rows = 0;

            int numProcs = Environment.ProcessorCount;
            int concurrencyLevel = numProcs * 2;
            ConcurrentDictionary<long, Matrix<float>> observableSurfPoints = new ConcurrentDictionary<long, Matrix<float>>(concurrencyLevel, ImageList.Count);

            Parallel.ForEach(ImageList, img =>
            {
                string imagePath = img.ImagePath;
                using (Image<Gray, byte> observerImage = new Image<Gray, byte>(imagePath))
                {
                    VectorOfKeyPoint observerKeyPoints = new VectorOfKeyPoint();
                    Matrix<float> observerDescriptors = surfDetector.DetectAndCompute(observerImage, null, observerKeyPoints);
                    observableSurfPoints.TryAdd(img.Id, observerDescriptors);
                }
            });

            foreach (var rec in ImageList)
            {
                Matrix<float> observerDescriptors = observableSurfPoints[rec.Id];
                if (superMatrix != null)
                    superMatrix = superMatrix.ConcateVertical(observerDescriptors);
                else
                    superMatrix = observerDescriptors;

                int endRows = rows + observerDescriptors.Rows - 1;
                observerSurfImageIndexList.Add(new SURFRecord1
                {
                    Id = rec.Id,
                    ImageName = rec.ImageName,
                    ImagePath = rec.ImagePath,
                    IndexStart = rows,
                    IndexEnd = endRows,
                    Distance = 0
                });
                rows = endRows + 1;
            }

            #endregion

            Emgu.CV.Flann.Index flannIndex = new Emgu.CV.Flann.Index(superMatrix, 4);
            var indices = new Matrix<int>(modelDescriptors.Rows, 2); // matrix that will contain indices of the 2-nearest neighbors found
            var dists = new Matrix<float>(modelDescriptors.Rows, 2); // matrix that will contain distances to the 2-nearest neighbors found
            flannIndex.KnnSearch(modelDescriptors, indices, dists, 2, 24);

            IntervalTreeHelper.CreateTree(observerSurfImageIndexList);
            for (int i = 0; i < indices.Rows; i++)
            {
                // filter out all inadequate pairs based on distance between pairs
                if (dists.Data[i, 0] < (0.3 * dists.Data[i, 1]))
                {
                    var img = IntervalTreeHelper.GetImageforRange(indices[i, 0]);
                    if (img != null) img.Distance++;
                }
            }
            int maxMatch = Convert.ToInt32(observerSurfImageIndexList.Select(rec => rec.Distance).Max());
            observerSurfImageIndexList = observerSurfImageIndexList.OrderByDescending(rec => rec.Distance).ToList();
            int totalImageCount = observerSurfImageIndexList.Count;
            for (int i = 0; i < totalImageCount; i++)
            {
                long id = observerSurfImageIndexList[i].Id;
                var img2 = ImageList.Where(img => img.Id == id).SingleOrDefault();
                if (img2 != null)
                {
                    double countofMatch = observerSurfImageIndexList[i].Distance;
                    if (countofMatch > 0)
                    {
                        img2.SurfRank = (totalImageCount - i);
                    }
                }

            }

            return ImageList;
        }
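
The filtering loop above is Lowe's ratio test with a strict 0.3 cutoff (the other examples in this file use 0.8, so far fewer neighbour pairs pass here). The same vote-counting idea as a standalone sketch:

        // Count how many query descriptors pass the ratio test: the best match
        // must be clearly better than the second best before it is trusted.
        private static int CountGoodMatches(Matrix<float> dists, double ratio)
        {
            int good = 0;
            for (int i = 0; i < dists.Rows; i++)
                if (dists.Data[i, 0] < ratio * dists.Data[i, 1])
                    good++;
            return good;
        }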