This class uses ImageFeature to match or track objects.
Inheritance: DisposableObject
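The examples below share one workflow: extract SURF features from a model image, build a Features2DTracker over them, match features extracted from an observed image, filter the matches by voting, and estimate a homography that locates the model in the scene. Here is a minimal sketch of that pipeline, assuming the Emgu CV 2.x API used throughout these examples; "box.png" and "box_in_scene.png" are the stock Emgu sample images from Example #5 and stand in for your own model and scene images.

 using System;
 using Emgu.CV;
 using Emgu.CV.Features2D;
 using Emgu.CV.Structure;

 static class TrackerSketch
 {
     static void Main()
     {
         // Hessian threshold 500, non-extended (64-dimensional) SURF descriptors.
         SURFDetector detector = new SURFDetector(500, false);

         using (Image<Gray, Byte> model = new Image<Gray, Byte>("box.png"))
         using (Image<Gray, Byte> observed = new Image<Gray, Byte>("box_in_scene.png"))
         {
             // Build the tracker from the model image's features.
             ImageFeature[] modelFeatures = detector.DetectFeatures(model, null);
             Features2DTracker tracker = new Features2DTracker(modelFeatures);

             // Match observed features against the model (2 nearest neighbours),
             // then filter by uniqueness and by size/orientation consistency.
             ImageFeature[] observedFeatures = detector.DetectFeatures(observed, null);
             Features2DTracker.MatchedImageFeature[] matches = tracker.MatchFeature(observedFeatures, 2, 20);
             matches = Features2DTracker.VoteForUniqueness(matches, 0.8);
             matches = Features2DTracker.VoteForSizeAndOrientation(matches, 1.5, 20);

             // A non-null homography means the model was located in the scene.
             HomographyMatrix homography = Features2DTracker.GetHomographyMatrixFromMatchedFeatures(matches);
             Console.WriteLine(homography != null ? "Model located in scene" : "No match");
         }
     }
 }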
Example #1
 public SignDetector(Image<Bgr, Byte> stopSignModel)
 {
     _detector2 = new SURFDetector(500, false);
     using (Image<Gray, Byte> redMask = GetColorPixelMask(stopSignModel))
     {
         try
         {
             _tracker2 = new Features2DTracker<float>(_detector2.DetectFeatures(redMask, null));
         }
         catch { } // ignore feature-detection failures; _tracker2 remains null
     }
     _octagonStorage2 = new MemStorage();
     _octagon2 = new Contour<Point>(_octagonStorage2);
     _octagon2.PushMulti(new Point[] {
         //hexagon
         new Point(1, 0),
         new Point(2, 0),
         new Point(3, 1),
         new Point(2, 2),
         new Point(1, 2),
         new Point(0, 1)},
         //octagon
         //new Point(1, 0),
         //new Point(2, 0),
         //new Point(3, 1),
         //new Point(3, 2),
         //new Point(2, 3),
         //new Point(1, 3),
         //new Point(0, 2),
         //new Point(0, 1)},
         Emgu.CV.CvEnum.BACK_OR_FRONT.FRONT);
 }
Example #2
        public DetectorResult Process(Image<Bgr, byte> rawFrame, Image<Gray, byte> grayFrame)
        {
            var surfParam = new SURFDetector(900, true);

            var modelImage = new Image<Gray, byte>("iphone\\signin.jpg");
            var modelFeatures = surfParam.DetectFeatures(modelImage, null);
            var tracker = new Features2DTracker(modelFeatures);

            var imageFeatures = surfParam.DetectFeatures(grayFrame, null);
            var homographyMatrix = tracker.Detect(imageFeatures, 100.0);

            Image<Bgr, Byte> processedImage = modelImage.Convert<Bgr, Byte>().ConcateVertical(rawFrame);

            if (homographyMatrix != null)
            {
                var rect = modelImage.ROI;
                var pts = new[]
                              {
                                  new PointF(rect.Left, rect.Bottom),
                                  new PointF(rect.Right, rect.Bottom),
                                  new PointF(rect.Right, rect.Top),
                                  new PointF(rect.Left, rect.Top)
                              };
                homographyMatrix.ProjectPoints(pts);

                for (int i = 0; i < pts.Length; i++)
                    pts[i].Y += modelImage.Height;

                processedImage.DrawPolyline(Array.ConvertAll(pts, Point.Round), true, new Bgr(Color.DarkOrange), 1);
            }
            return new DetectorResult(){RawImage = rawFrame, ProcessedImage = processedImage};
        }
Example #3
 /// <summary>
 /// Detect image features from the given image
 /// </summary>
 /// <param name="image">The image to detect features from</param>
 /// <param name="mask">The optional mask, can be null if not needed</param>
 /// <returns>The image features detected from the given image</returns>
 public ImageFeature[] DetectFeatures(Image<Gray, Byte> image, Image<Gray, byte> mask)
 {
     using (VectorOfKeyPoint kpts = DetectKeyPointsRaw(image, mask))
     using (Matrix<float> desc = ComputeDescriptorsRaw(image, mask, kpts))
     {
         return Features2DTracker.ConvertToImageFeature(kpts, desc);
     }
 }
Example #4
 private void CreateSurfaceTracker()
 {
     surfaceParameters = new SURFDetector(500, false);
     using (Image<Bgr, Byte> stopSignModel = new Image<Bgr, Byte>(Properties.Resources.SignModel))
     using (Image<Gray, Byte> redMask = GetRedPixelMask(stopSignModel))
     {
         featureTracker = new Features2DTracker(surfaceParameters.DetectFeatures(redMask, null));
     }
 }
Example #5
        static void Run()
        {
            SURFDetector surfParam = new SURFDetector(350, false);

            Image<Gray, Byte> modelImage = new Image<Gray, byte>("box.png");
            //extract features from the object image
            ImageFeature[] modelFeatures = surfParam.DetectFeatures(modelImage, null);

            //Create a Feature Tracker
            Features2DTracker tracker = new Features2DTracker(modelFeatures);

            Image<Gray, Byte> observedImage = new Image<Gray, byte>("box_in_scene.png");

            Stopwatch watch = Stopwatch.StartNew();
            //extract features from the observed image
            ImageFeature[] imageFeatures = surfParam.DetectFeatures(observedImage, null);

            Features2DTracker.MatchedImageFeature[] matchedFeatures = tracker.MatchFeature(imageFeatures, 2, 20);
            matchedFeatures = Features2DTracker.VoteForUniqueness(matchedFeatures, 0.8);
            matchedFeatures = Features2DTracker.VoteForSizeAndOrientation(matchedFeatures, 1.5, 20);
            HomographyMatrix homography = Features2DTracker.GetHomographyMatrixFromMatchedFeatures(matchedFeatures);
            watch.Stop();

            //Merge the object image and the observed image into one image for display
            Image<Gray, Byte> res = modelImage.ConcateVertical(observedImage);

            #region draw lines between the matched features
            foreach (Features2DTracker.MatchedImageFeature matchedFeature in matchedFeatures)
            {
                PointF p = matchedFeature.ObservedFeature.KeyPoint.Point;
                p.Y += modelImage.Height;
                res.Draw(new LineSegment2DF(matchedFeature.SimilarFeatures[0].Feature.KeyPoint.Point, p), new Gray(0), 1);
            }
            #endregion

            #region draw the projected region on the image
            if (homography != null)
            {
                //draw a rectangle along the projected model
                Rectangle rect = modelImage.ROI;
                PointF[] pts = new PointF[] {
                    new PointF(rect.Left, rect.Bottom),
                    new PointF(rect.Right, rect.Bottom),
                    new PointF(rect.Right, rect.Top),
                    new PointF(rect.Left, rect.Top)};
                homography.ProjectPoints(pts);

                for (int i = 0; i < pts.Length; i++)
                    pts[i].Y += modelImage.Height;

                res.DrawPolyline(Array.ConvertAll<PointF, Point>(pts, Point.Round), true, new Gray(255.0), 5);
            }
            #endregion

            ImageViewer.Show(res, String.Format("Matched in {0} milliseconds", watch.ElapsedMilliseconds));
        }
Example #6
        /// <summary>
        /// Compute the descriptor given the image and the point location
        /// </summary>
        /// <param name="image">The image where the descriptor will be computed from</param>
        /// <param name="mask">The optional mask, can be null if not needed</param>
        /// <param name="keyPoints">The keypoint where the descriptor will be computed from</param>
        /// <returns>The image features founded on the keypoint location</returns>
        public ImageFeature[] ComputeDescriptors(Image <Gray, Byte> image, Image <Gray, byte> mask, MKeyPoint[] keyPoints)
        {
            int sizeOfdescriptor = _surfParams.Extended ? 128 : 64;

            using (VectorOfKeyPoint pts = new VectorOfKeyPoint())
            {
                pts.Push(keyPoints);
                using (Matrix <float> descriptors = ComputeDescriptorsRaw(image, mask, pts))
                    return(Features2DTracker.ConvertToImageFeature(pts, descriptors));
            }
        }
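As a hedged usage sketch for ComputeDescriptors (assuming the Emgu CV 2.x API, its companion DetectKeyPoints overload, the usual Emgu namespaces, and the stock "box.png" sample image): keypoints are detected first, then a descriptor is computed at each location.

         SURFDetector detector = new SURFDetector(500, false);
         using (Image<Gray, Byte> img = new Image<Gray, Byte>("box.png"))
         {
             // Detect keypoint locations, then compute a descriptor for each one.
             MKeyPoint[] keyPoints = detector.DetectKeyPoints(img, null);
             ImageFeature[] features = detector.ComputeDescriptors(img, null, keyPoints);
             Console.WriteLine("Computed {0} descriptors", features.Length);
         }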
Example #7
        /*
         * /// <summary>
         * /// Compute the descriptor given the BGR image and the point location, using opponent color (CGIV 2008 "Color Descriptors for Object Category Recognition").
         * /// </summary>
         * /// <param name="image">The image where the descriptor will be computed from</param>
         * /// <param name="keyPoints">The keypoint where the descriptor will be computed from</param>
         * /// <returns>The descriptors found at the keypoint locations</returns>
         * public Matrix<float> ComputeDescriptorsRaw(Image<Bgr, Byte> image, VectorOfKeyPoint keyPoints)
         * {
         * int count = keyPoints.Size;
         * if (count == 0) return null;
         * Matrix<float> descriptors = new Matrix<float>(count, DescriptorSize * 3, 1);
         * CvSIFTDetectorComputeDescriptorsBGR(_ptr, image, keyPoints, descriptors);
         * return descriptors;
         * }*/

        /// <summary>
        /// Compute the descriptor given the image and the point location
        /// </summary>
        /// <param name="image">The image where the descriptor will be computed from</param>
        /// <param name="mask">The optional mask, can be null if not needed</param>
        /// <param name="keyPoints">The keypoint where the descriptor will be computed from</param>
        /// <returns>The descriptors founded on the keypoint location</returns>
        public ImageFeature[] ComputeDescriptors(Image <Gray, Byte> image, Image <Gray, byte> mask, MKeyPoint[] keyPoints)
        {
            if (keyPoints.Length == 0)
            {
                return(new ImageFeature[0]);
            }
            using (VectorOfKeyPoint kpts = new VectorOfKeyPoint())
            {
                kpts.Push(keyPoints);
                using (Matrix <float> descriptor = ComputeDescriptorsRaw(image, mask, kpts))
                {
                    return(Features2DTracker.ConvertToImageFeature(kpts, descriptor));
                }
            }
        }
Example #8
        public ImageDetector(Image<Gray, Byte> imgModel)
        {
            _detector = new SURFDetector(500, false);
            ImageFeature<float>[] features = _detector.DetectFeatures(imgModel, null);
            if (features.Length == 0)
                throw new Exception("No image feature has been found in the image model");
            _tracker = new Features2DTracker<float>(features);

            _octagonStorage = new MemStorage();
            _octagon = new Contour<Point>(_octagonStorage);
            _octagon.PushMulti(new Point[] {
            new Point(1, 0),
            new Point(2, 0),
            new Point(3, 1),
            new Point(3, 2),
            new Point(2, 3),
            new Point(1, 3),
            new Point(0, 2),
            new Point(0, 1)},
               Emgu.CV.CvEnum.BACK_OR_FRONT.FRONT);
        }
Example #9
 public StopSignDetector(Image<Bgr, Byte> stopSignModel)
 {
     _detector = new SURFDetector(500, false);
     using (Image<Gray, Byte> redMask = GetRedPixelMask(stopSignModel))
     {
         ImageFeature<float>[] temp = _detector.DetectFeatures(redMask, null);
         _tracker = new Features2DTracker<float>(temp);
     }
     _octagonStorage = new MemStorage();
     _octagon = new Contour<Point>(_octagonStorage);
     _octagon.PushMulti(new Point[] {
         new Point(1, 0),
         new Point(2, 0),
         new Point(3, 1),
         new Point(3, 2),
         new Point(2, 3),
         new Point(1, 3),
         new Point(0, 2),
         new Point(0, 1)},
         Emgu.CV.CvEnum.BACK_OR_FRONT.FRONT
     );
 }
Example #10
        public List<ImageRecord> QueryImage(string queryImagePath,  SurfSettings surfSetting = null)
        {
            List<ImageRecord> rtnImageList = new List<ImageRecord>();

            var observerFeatureSets = SurfRepository.GetSurfRecordList();

            #region Surf Detector Region
            double hessianThresh = 500;
            double uniquenessThreshold = 0.8;
            int minGoodMatchPercent = 50;

            if (surfSetting != null)
            {
                hessianThresh = surfSetting.HessianThresh.Value;
                uniquenessThreshold = surfSetting.UniquenessThreshold.Value;
                minGoodMatchPercent = surfSetting.GoodMatchThreshold.Value;
            }

            SURFDetector surfDetector = new SURFDetector(hessianThresh, false);
            #endregion

            using (Image<Gray, byte> modelImage = new Image<Gray, byte>(queryImagePath))
            {
                ImageFeature<float>[] modelFeatures = surfDetector.DetectFeatures(modelImage, null);

                if (modelFeatures.Length < 4) throw new InvalidOperationException("Model image didn't have any significant features to detect");

                Features2DTracker<float> tracker = new Features2DTracker<float>(modelFeatures);
                foreach (var surfRecord in observerFeatureSets)
                {
                    string queryImageName = System.IO.Path.GetFileName(queryImagePath);
                    string modelImageName = surfRecord.ImageName;

                    Features2DTracker<float>.MatchedImageFeature[] matchedFeatures = tracker.MatchFeature(surfRecord.observerFeatures, 2);

                    Features2DTracker<float>.MatchedImageFeature[] uniqueFeatures = Features2DTracker<float>.VoteForUniqueness(matchedFeatures, uniquenessThreshold);

                    Features2DTracker<float>.MatchedImageFeature[] uniqueRotOriFeatures = Features2DTracker<float>.VoteForSizeAndOrientation(uniqueFeatures, 1.5, 20);

                    int goodMatchCount = uniqueRotOriFeatures.Length;
                    bool isMatch = false;

                    // Percentage of model features that survived the filtering,
                    // e.g. 120 surviving matches out of 200 model features -> 60%.
                    double matchPercentage = Math.Round((double)goodMatchCount / modelFeatures.Length * 100);
                    if (matchPercentage >= minGoodMatchPercent)
                    {

                        HomographyMatrix homography =
                            Features2DTracker<float>.GetHomographyMatrixFromMatchedFeatures(uniqueRotOriFeatures);
                        if (homography != null)
                        {
                            isMatch = homography.IsValid(5);
                            if (isMatch)
                            {
                                surfRecord.Distance = matchPercentage;
                                rtnImageList.Add((ImageRecord)surfRecord);
                            }
                        }
                    }

                }
            }
            rtnImageList = rtnImageList.OrderByDescending(x => x.Distance).ToList();
            return rtnImageList;
        }
Example #11
        private void InitializeModels(string modelImageDirPath)
        {
            string[] files = Directory.GetFiles(modelImageDirPath);

            progressBar.Minimum = 0;
            progressBar.Maximum = files.Length * 2;
            progressBar.Value = 0;

            foreach (string file in files)
            {
                if (initRequired)
                {
                    Image<Gray, Byte> modelImage = new Image<Gray, byte>(file);
                    ImageFeature[] modelFeatures = surfParam.DetectFeatures(modelImage, null);
                    Features2DTracker tracker = new Features2DTracker(modelFeatures);

                    modelFeatureTracker.Add(tracker);
                    modelImageFilePath.Add(file);
                }

                progressBar.Increment(1);
            }

            initRequired = false;
        }