/// <summary>
/// Finds the minimum axis-aligned bounding rectangle that encloses the given collection of points.
/// </summary>
/// <param name="points">The collection of points</param>
/// <returns>The minimum bounding rectangle of the point collection</returns>
/// <summary>
/// Use camshift to track the feature
/// </summary>
/// <param name="observedFeatures">The feature found from the observed image</param>
/// <param name="initRegion">The predicted location of the model in the observed image. If not known, use MCvBox2D.Empty as default</param>
/// <param name="priorMask">The mask that should be the same size as the observed image. Contains a priori value of the probability a match can be found. If you are not sure, pass an image fills with 1.0s</param>
/// <returns>If a match is found, the homography projection matrix is returned. Otherwise null is returned</returns>
public HomographyMatrix CamShiftTrack(SURFFeature[] observedFeatures, MCvBox2D initRegion, Image<Gray, Single> priorMask)
{
   using (Image<Gray, Single> matchMask = new Image<Gray, Single>(priorMask.Size))
   {
      #region get the list of matched point on the observed image
      Single[, ,] matchMaskData = matchMask.Data;

      // Compute the matched features, then prune ambiguous matches
      // (keep a match only if it is sufficiently better than the runner-up; 0.8 is the uniqueness threshold).
      MatchedSURFFeature[] matchedFeature = _matcher.MatchFeature(observedFeatures, 2, 20);
      matchedFeature = VoteForUniqueness(matchedFeature, 0.8);

      // Write each surviving match into the mask at its observed pixel location.
      // The weight is the reciprocal of the match distance, so closer (better) matches score higher.
      // NOTE(review): mask data is indexed [row, col] = [Y, X]; assumes every feature point lies inside the image.
      foreach (MatchedSURFFeature f in matchedFeature)
      {
         PointF p = f.ObservedFeature.Point.pt;
         matchMaskData[(int)p.Y, (int)p.X, 0] = 1.0f / (float)f.SimilarFeatures[0].Distance;
      }
      #endregion

      // Determine the camshift search window: the whole ROI when no prediction is given,
      // otherwise the bounding rectangle of the predicted box, clipped to the image ROI.
      Rectangle startRegion;
      if (initRegion.Equals(MCvBox2D.Empty))
      {
         startRegion = matchMask.ROI;
      }
      else
      {
         startRegion = PointCollection.BoundingRectangle(initRegion.GetVertices());
         if (startRegion.IntersectsWith(matchMask.ROI))
         {
            startRegion.Intersect(matchMask.ROI);
         }
      }

      // Combine the match weights with the caller-supplied prior probabilities (element-wise product, in place).
      CvInvoke.cvMul(matchMask.Ptr, priorMask.Ptr, matchMask.Ptr, 1.0);

      MCvConnectedComp comp;
      MCvBox2D currentRegion;
      // Updates the current location: run camshift on the weighted mask
      // (at most 10 iterations or until the window moves less than 1.0e-8).
      CvInvoke.cvCamShift(matchMask.Ptr, startRegion, new MCvTermCriteria(10, 1.0e-8), out comp, out currentRegion);

      #region find the SURF features that belongs to the current Region
      MatchedSURFFeature[] featuesInCurrentRegion;
      using (MemStorage stor = new MemStorage())
      {
         // Build a polygon contour from the tracked box and keep only the matches that fall inside it.
         Contour<System.Drawing.PointF> contour = new Contour<PointF>(stor);
         contour.PushMulti(currentRegion.GetVertices(), Emgu.CV.CvEnum.BACK_OR_FRONT.BACK);
         CvInvoke.cvBoundingRect(contour.Ptr, 1); //this is required before calling the InContour function
         featuesInCurrentRegion = Array.FindAll(matchedFeature,
            delegate(MatchedSURFFeature f)
            {
               // InContour returns >= 0 for points inside or on the contour.
               return(contour.InContour(f.ObservedFeature.Point.pt) >= 0);
            });
      }
      #endregion

      // Filter the in-region matches by scale/rotation consistency, then estimate the homography.
      return(GetHomographyMatrixFromMatchedFeatures(VoteForSizeAndOrientation(featuesInCurrentRegion, 1.5, 20)));
   }
}
/// <summary>
/// Create a planar subdivision from the given points. The ROI is computed as the minimum bounding Rectangle for the input points
/// </summary>
/// <param name="silent">If true, any exception during insert will be ignored</param>
/// <param name="points">The points to be inserted to this planar subdivision</param>
public Subdiv2D(PointF[] points, bool silent = false)
{
   // The region of interest is the smallest axis-aligned rectangle enclosing all input points.
   _roi = PointCollection.BoundingRectangle(points);

   // Allocate the native subdivision over that region, then populate it.
   _ptr = CvInvoke.cveSubdiv2DCreate(ref _roi);
   Insert(points, silent);
}
/// <summary>
/// Create a planar subdivision from the given points. The ROI is computed as the minimum bounding Rectangle for the input points
/// </summary>
/// <param name="silent">If true, any exception during insert will be ignored</param>
/// <param name="points">The points to be inserted to this planar subdivision</param>
public PlanarSubdivision(PointF[] points, bool silent)
{
   // The region of interest is the smallest axis-aligned rectangle enclosing all input points.
   _roi = PointCollection.BoundingRectangle(points);

   // Create the Delaunay subdivision backed by its own storage, then populate it.
   _storage = new MemStorage();
   _ptr = CvInvoke.cvCreateSubdivDelaunay2D(_roi, _storage);
   Insert(points, silent);
}