Example #1
        /// <summary>
        /// Use camshift to track the feature
        /// </summary>
        /// <param name="observedFeatures">The feature found from the observed image</param>
        /// <param name="initRegion">The predicted location of the model in the observed image. If not known, use MCvBox2D.Empty as default</param>
        /// <param name="priorMask">The mask that should be the same size as the observed image. Contains a priori value of the probability a match can be found. If you are not sure, pass an image fills with 1.0s</param>
        /// <returns>If a match is found, the homography projection matrix is returned. Otherwise null is returned</returns>
        public HomographyMatrix CamShiftTrack(SURFFeature[] observedFeatures, MCvBox2D initRegion, Image<Gray, Single> priorMask)
        {
            using (Image<Gray, Single> matchMask = new Image<Gray, Single>(priorMask.Size))
            {
                #region get the list of matched points on the observed image
                Single[,,] matchMaskData = matchMask.Data;

                //Compute the matched features
                MatchedSURFFeature[] matchedFeature = _matcher.MatchFeature(observedFeatures, 2, 20);
                matchedFeature = VoteForUniqueness(matchedFeature, 0.8);

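                //Weight each matched point by the inverse of its match distance,
                //so stronger matches contribute more to the CamShift density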
                foreach (MatchedSURFFeature f in matchedFeature)
                {
                    PointF p = f.ObservedFeature.Point.pt;
                    matchMaskData[(int)p.Y, (int)p.X, 0] = 1.0f / (float)f.SimilarFeatures[0].Distance;
                }
                #endregion

                Rectangle startRegion;
                if (initRegion.Equals(MCvBox2D.Empty))
                {
                    startRegion = matchMask.ROI;
                }
                else
                {
                    startRegion = PointCollection.BoundingRectangle(initRegion.GetVertices());
                    if (startRegion.IntersectsWith(matchMask.ROI))
                    {
                        startRegion.Intersect(matchMask.ROI);
                    }
                }

                CvInvoke.cvMul(matchMask.Ptr, priorMask.Ptr, matchMask.Ptr, 1.0);

                MCvConnectedComp comp;
                MCvBox2D         currentRegion;
                //Update the current location: CamShift treats matchMask as a
                //probability image and shifts startRegion toward its density peak
                CvInvoke.cvCamShift(matchMask.Ptr, startRegion, new MCvTermCriteria(10, 1.0e-8), out comp, out currentRegion);

                #region find the SURF features that belong to the current region
                MatchedSURFFeature[] featuresInCurrentRegion;
                using (MemStorage stor = new MemStorage())
                {
                    Contour<PointF> contour = new Contour<PointF>(stor);
                    contour.PushMulti(currentRegion.GetVertices(), Emgu.CV.CvEnum.BACK_OR_FRONT.BACK);

                    CvInvoke.cvBoundingRect(contour.Ptr, 1); //this is required before calling the InContour function

                    featuresInCurrentRegion = Array.FindAll(matchedFeature,
                                                            delegate(MatchedSURFFeature f)
                                                            { return contour.InContour(f.ObservedFeature.Point.pt) >= 0; });
                }
                #endregion

                return GetHomographyMatrixFromMatchedFeatures(VoteForSizeAndOrientation(featuresInCurrentRegion, 1.5, 20));
            }
        }
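
A minimal, hypothetical call site for the method above, for context. It assumes CamShiftTrack lives on a tracker class built from the model image's SURF features (called SURFTracker here for illustration) and that features are extracted with Emgu CV 2.x's ExtractSURF; the all-ones prior mask and MCvBox2D.Empty express "no prior knowledge".

        //Sketch only: SURFTracker stands in for whatever class hosts CamShiftTrack
        MCvSURFParams surfParam = new MCvSURFParams(500, false);
        Image<Gray, Byte> modelImage = new Image<Gray, Byte>("model.png");
        SURFTracker tracker = new SURFTracker(modelImage.ExtractSURF(ref surfParam));

        Image<Gray, Byte> observedImage = new Image<Gray, Byte>("observed.png");
        SURFFeature[] observedFeatures = observedImage.ExtractSURF(ref surfParam);

        //No prior information: a mask of all 1.0s and an empty init region
        Image<Gray, Single> priorMask =
            new Image<Gray, Single>(observedImage.Width, observedImage.Height, new Gray(1.0));

        HomographyMatrix homography =
            tracker.CamShiftTrack(observedFeatures, MCvBox2D.Empty, priorMask);
        if (homography != null)
        {
            //A match was found; homography maps model coordinates into the observed image
        }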
Example #2
        private Image<T, Byte> GetSubImage<T>(Image<T, Byte> image, MCvBox2D box, float angle) where T : struct, IColor
        {
            if (box.Equals(MCvBox2D.Empty))
            {
                return null;
            }
            // Ensure the lowest side is horizontal:
            // box.angle is measured between the horizontal axis and the first side
            // (i.e. the width) in degrees. This normalization is currently disabled,
            // so the angle parameter goes unused.
            //if (box.angle < -45)
            //{
            //    box.angle += 90;
            //    float width = box.size.Width;
            //    box.size.Width = box.size.Height;
            //    box.size.Height = width;
            //}

            // Crop the rotated box from the image; the * 255 scales what appears
            // to be a binary (0/1) mask up to the full byte range
            Image<T, Byte> mask = image.Copy(box) * 255;
            return ResizeImage<T>(mask, 60, 60);
        }
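
A hypothetical caller for GetSubImage, for context; since the method is private, this would sit in the same class. The box geometry is made up, and ResizeImage<T> is assumed to be the class's own helper that scales the crop to the requested size.

        //Sketch only: the values are illustrative
        Image<Bgr, Byte> frame = new Image<Bgr, Byte>("frame.png");
        MCvBox2D box = new MCvBox2D(new PointF(120, 80), new SizeF(90, 30), -12f);

        //Passing box.angle mirrors the (currently unused) angle parameter
        Image<Bgr, Byte> patch = GetSubImage<Bgr>(frame, box, box.angle);
        if (patch != null)
            patch.Save("patch.png"); //the 60x60 normalized crop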