private IplImage RotateImage(IplImage img, Double angle)
{
    Mat imgMat = null;
    IplImage tempImage = null;
    IplImage rotatedImg = null;
    CvSeq<CvPoint> contours = null;
    CvMemStorage storage = null;

    // Fall back to a small default rotation when no usable angle was supplied
    if (angle == 0.0 || Double.IsNaN(angle))
    {
        angle = -3.0;
    }

    if (angle != 0.0 && !Double.IsNaN(angle))
    {
        try
        {
            rotatedImg = new IplImage(img.Size, img.Depth, img.NChannels);
            tempImage = new IplImage(img.Size, img.Depth, 1);

            // Work on a grayscale copy so FindContours gets a single-channel image
            Cv.CvtColor(img, tempImage, ColorConversion.RgbaToGray);
            imgMat = new Mat(tempImage);

            storage = new CvMemStorage();
            Cv.FindContours(tempImage, storage, out contours, CvContour.SizeOf,
                            ContourRetrieval.List, ContourChain.ApproxNone);
            contours = Cv.ApproxPoly(contours, CvContour.SizeOf, storage,
                                     ApproxPolyMethod.DP, 3, true);

            // Rotate around the center of the minimum-area bounding box of the contours
            CvBox2D box = Cv.MinAreaRect2(contours);
            CvMat rotMat = Cv.GetRotationMatrix2D(box.Center, angle, 1.0);
            Cv.WarpAffine(img, rotatedImg, rotMat, Interpolation.Cubic);
        }
        catch (Exception)
        {
            if (null != rotatedImg)
            {
                Cv.ReleaseImage(rotatedImg);
                rotatedImg = null;
            }
            throw;   // rethrow without resetting the stack trace
        }
        finally
        {
            if (null != tempImage)
            {
                Cv.ReleaseImage(tempImage);
            }
            if (null != storage)
            {
                storage.Dispose();   // release the contour storage as well
            }
        }
    }

    return rotatedImg;
}
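// A minimal usage sketch (not from the original source). RotateImage expects a
// 4-channel RGBA frame, since it converts with ColorConversion.RgbaToGray before
// finding contours. "rgbaFrame" (the current camera frame) and the 5-degree angle
// are assumptions for illustration; the caller owns the returned IplImage and
// should release it once it is no longer needed.
IplImage straightened = RotateImage(rgbaFrame, 5.0);   // rotate by an assumed 5 degrees
if (straightened != null)
{
    Cv.ShowImage("Rotated", straightened);             // quick visual check
    Cv.ReleaseImage(straightened);                     // image was allocated inside RotateImage
}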
// Converts the ROI (screen coordinates) into world coordinates and
// positions the world GameObject at that position and rotation
void ROIScreenToGameObject(CvBox2D boxToTrack, GameObject obj1)
{
    Vector2 origin;
    origin.x = objectScreenPosition.position.x + scaleObjectWidth(boxToTrack.Center.X);
    origin.y = objectScreenPosition.position.y + scaleObjectHeight(boxToTrack.Center.Y);

    obj1.transform.position = Camera.main.ScreenToWorldPoint(
        new Vector3(origin.x,
                    Screen.height - origin.y,
                    Mathf.Abs(transform.position.z - Camera.main.transform.position.z)));
    obj1.transform.eulerAngles = new Vector3(0, 0, 90 - boxToTrack.Angle);
    obj1.transform.localScale = new Vector3(scaleObjectHeight(boxToTrack.Size.Height) / 100,
                                            scaleObjectWidth(boxToTrack.Size.Width) / 100,
                                            1);
}
#if LANG_JP
    /// <summary>
    /// 枠だけの楕円,もしくは塗りつぶされた楕円を描画する
    /// </summary>
    /// <param name="img">楕円が描かれる画像.</param>
    /// <param name="box">描画したい楕円を囲む矩形領域.</param>
    /// <param name="color">楕円の色.</param>
    /// <param name="thickness">楕円境界線の幅.[既定値は1]</param>
    /// <param name="line_type">楕円境界線の種類.[既定値はLineType.Link8]</param>
    /// <param name="shift">矩形領域の頂点座標の小数点以下の桁を表すビット数.[既定値は0]</param>
#else
    /// <summary>
    /// Draws simple or thick elliptic arc or fills ellipse sector
    /// </summary>
    /// <param name="img">Image. </param>
    /// <param name="box">The enclosing box of the ellipse drawn </param>
    /// <param name="color">Ellipse color. </param>
    /// <param name="thickness">Thickness of the ellipse boundary. [By default this is 1]</param>
    /// <param name="line_type">Type of the ellipse boundary. [By default this is LineType.Link8]</param>
    /// <param name="shift">Number of fractional bits in the box vertex coordinates. [By default this is 0]</param>
#endif
    public static void Ellipse(this Mat img, CvBox2D box, CvScalar color,
        int thickness = 1, LineType line_type = LineType.Link8, int shift = 0)
    {
        if (img == null)
        {
            throw new ArgumentNullException("img");
        }

        // Ellipse axes are half the box dimensions (width and height swapped, as in the original)
        CvSize axes = new CvSize
        {
            Width = (int)Math.Round(box.Size.Height * 0.5),
            Height = (int)Math.Round(box.Size.Width * 0.5)
        };
        Ellipse(img, box.Center, axes, box.Angle, 0, 360, color, thickness, line_type, shift);
    }
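// A small usage sketch (not from the original source) for the extension method above:
// draw a rotated box as an ellipse outline. "mat" (the image being drawn on) is assumed
// to exist already; the box geometry and color are arbitrary illustration values.
CvBox2D box = new CvBox2D
{
    Center = new CvPoint2D32f(160, 120),
    Size = new CvSize2D32f(120, 60),
    Angle = 30f
};
mat.Ellipse(box, new CvScalar(0, 0, 255), 2);   // red outline, thickness 2, default line type/shift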
    /// <summary>
    /// Initializes a RotatedRect from a CvBox2D, copying its center, size and angle.
    /// </summary>
    /// <param name="box">The CvBox2D to convert.</param>
    public RotatedRect(CvBox2D box)
    {
        Center = box.Center;
        Size = box.Size;
        Angle = box.Angle;
    }
#if LANG_JP
    /// <summary>
    /// 枠だけの楕円,もしくは塗りつぶされた楕円を描画する
    /// </summary>
    /// <param name="img">楕円が描かれる画像.</param>
    /// <param name="box">描画したい楕円を囲む矩形領域.</param>
    /// <param name="color">楕円の色.</param>
#else
    /// <summary>
    /// Draws simple or thick elliptic arc or fills ellipse sector
    /// </summary>
    /// <param name="img">Image. </param>
    /// <param name="box">The enclosing box of the ellipse drawn </param>
    /// <param name="color">Ellipse color. </param>
#endif
    public static void Ellipse(Mat img, CvBox2D box, CvScalar color)
    {
        if (img == null)
            throw new ArgumentNullException("img");

        // Ellipse axes are half the box dimensions (width and height swapped, as in the original)
        CvSize axes = new CvSize
        {
            Width = (int)Math.Round(box.Size.Height * 0.5),
            Height = (int)Math.Round(box.Size.Width * 0.5)
        };
        Ellipse(img, box.Center, axes, box.Angle, 0, 360, color, 1, LineType.Link8, 0);
    }
// P/Invoke declaration for the native OpenCV function cvBoxPoints, which writes the
// four vertices of the rotated box (as CvPoint2D32f, i.e. 8 floats) to the buffer at pt.
// A [DllImport] attribute naming the native OpenCV library is required for this extern
// declaration to bind.
public static extern void cvBoxPoints(CvBox2D box, IntPtr pt);
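// A hedged sketch (not part of the library) of how the raw cvBoxPoints entry point above
// might be wrapped from managed code: pin a CvPoint2D32f[4] buffer so the native side can
// write the four corners of the rotated box into it. "BoxPointsManaged" is a hypothetical
// helper name; System.Runtime.InteropServices is required for GCHandle.
public static CvPoint2D32f[] BoxPointsManaged(CvBox2D box)
{
    var pts = new CvPoint2D32f[4];                               // cvBoxPoints writes exactly 4 vertices
    GCHandle handle = GCHandle.Alloc(pts, GCHandleType.Pinned);  // pin so the native call can write into the array
    try
    {
        cvBoxPoints(box, handle.AddrOfPinnedObject());
    }
    finally
    {
        handle.Free();
    }
    return pts;
}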
// Use the CamShift algorithm to track the base histogram throughout the
// succeeding frames
void CalculateCamShift(CvMat _image)
{
    CvMat _backProject = CalculateBackProjection(_image, _histogramToTrack);

    // Create convolution kernels for erosion and dilation
    IplConvKernel elementErode = Cv.CreateStructuringElementEx(10, 10, 5, 5, ElementShape.Rect, null);
    IplConvKernel elementDilate = Cv.CreateStructuringElementEx(4, 4, 2, 2, ElementShape.Rect, null);

    // Erode and then dilate the back projection.
    // Hopefully this will get rid of the noise in favor of the blob objects.
    Cv.Erode(_backProject, _backProject, elementErode, 1);
    Cv.Dilate(_backProject, _backProject, elementDilate, 1);

    if (backprojWindowFlag)
    {
        Cv.ShowImage("Back Projection", _backProject);
    }

    // Parameters returned by the CamShift algorithm
    CvBox2D _outBox;
    CvConnectedComp _connectComp;

    // Set the termination criteria for the CamShift algorithm:
    // at most 10 iterations and at least 1 pixel change in the centroid
    CvTermCriteria term_criteria = Cv.TermCriteria(CriteriaType.Iteration | CriteriaType.Epsilon, 10, 1);

    // Draw the object center based on the Kalman filter prediction
    CvMat _kalmanPrediction = _kalman.Predict();
    int predictX = Mathf.FloorToInt((float)_kalmanPrediction.GetReal2D(0, 0));
    int predictY = Mathf.FloorToInt((float)_kalmanPrediction.GetReal2D(1, 0));

    // Run the CamShift algorithm
    if (Cv.CamShift(_backProject, _rectToTrack, term_criteria, out _connectComp, out _outBox) > 0)
    {
        // Use the CamShift estimate of the object center to update the Kalman model
        CvMat _kalmanMeasurement = Cv.CreateMat(2, 1, MatrixType.F32C1);

        // Update the Kalman model with raw data from the CamShift estimate
        _kalmanMeasurement.Set2D(0, 0, _outBox.Center.X);   // Raw X position
        _kalmanMeasurement.Set2D(1, 0, _outBox.Center.Y);   // Raw Y position
        //_kalmanMeasurement.Set2D (2, 0, _outBox.Center.X - lastPosition.X);
        //_kalmanMeasurement.Set2D (3, 0, _outBox.Center.Y - lastPosition.Y);

        lastPosition.X = Mathf.FloorToInt(_outBox.Center.X);
        lastPosition.Y = Mathf.FloorToInt(_outBox.Center.Y);

        _kalman.Correct(_kalmanMeasurement);   // Correct the Kalman model with raw data

        // CamShift returns two values: _connectComp and _outBox.
        // _connectComp contains the newly estimated position and size of the
        // region of interest, which is passed into the subsequent call to CamShift.

        // Update the ROI rectangle with CamShift's new estimate of the ROI
        _rectToTrack = CheckROIBounds(_connectComp.Rect);

        // Draw a rectangle over the tracked ROI.
        // This method draws the rectangle but does not rotate it.
        _image.DrawRect(_rectToTrack, CvColor.Aqua);
        _image.DrawMarker(predictX, predictY, CvColor.Aqua);

        // _outBox contains a rotated rectangle estimating the position, size, and orientation
        // of the object we want to track (specified by the initial region of interest).
        // We take this estimate and draw a rotated bounding box.
        rotatedBoxToTrack = _outBox;

        // Draw a rotated rectangle representing CamShift's estimate of the
        // object's position, size, and orientation.
        _image.DrawPolyLine(rectangleBoxPoint(_outBox.BoxPoints()), true, CvColor.Red);
    }
    else
    {
        //Debug.Log ("Object lost by Camshift tracker");

        // CamShift lost the object: fall back to the Kalman prediction and
        // re-center the search window on the predicted position
        _image.DrawMarker(predictX, predictY, CvColor.Purple, MarkerStyle.CircleLine);
        _rectToTrack = CheckROIBounds(new CvRect(predictX - Mathf.FloorToInt(_rectToTrack.Width / 2),
                                                 predictY - Mathf.FloorToInt(_rectToTrack.Height / 2),
                                                 _rectToTrack.Width, _rectToTrack.Height));
        _image.DrawRect(_rectToTrack, CvColor.Purple);
    }

    if (trackWindowFlag)
    {
        Cv.ShowImage("Image", _image);
    }
}
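// The method above relies on a rectangleBoxPoint helper that is not shown in this snippet.
// A hedged reconstruction, assuming CvBox2D.BoxPoints() returns the four corners as
// CvPoint2D32f[] and DrawPolyLine expects a jagged CvPoint[][] array:
CvPoint[][] rectangleBoxPoint(CvPoint2D32f[] corners)
{
    var rounded = new CvPoint[corners.Length];
    for (int i = 0; i < corners.Length; i++)
    {
        // Round the floating-point corners to integer pixel coordinates
        rounded[i] = new CvPoint(Mathf.RoundToInt(corners[i].X), Mathf.RoundToInt(corners[i].Y));
    }
    return new CvPoint[][] { rounded };   // a single closed polyline
}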