/// <summary>
/// Unity per-frame callback: grabs a webcam frame, mirrors and resizes it, isolates the
/// largest blob inside the configured HSV range, draws that blob's minimum-area
/// bounding rectangle onto the frame, and publishes the frame as a Texture2D.
/// NOTE(review): the Mats created here (thresoldOUTFilter, imgOUTMat, structuringElement,
/// hierarchy, contours) are never disposed; since this runs every frame, that presumably
/// leaks native memory until the GC/finalizers catch up — confirm and consider using().
/// </summary>
void Update()
{
    Mat imgBGRMat;
    imgBGRMat = webcam.QueryFrame();
    if (imgBGRMat == null) // if frame is not ready
    {
        return;
    }
    // Mirror horizontally (selfie view) and normalize to the configured capture size.
    CvInvoke.Flip(imgBGRMat, imgBGRMat, FlipType.Horizontal);
    CvInvoke.Resize(imgBGRMat, imgBGRMat, new Size(webcam_Capture_WIDTH, webcam_Capture_HEIGHT));
    Mat thresoldOUTFilter = new Mat(); // NOTE(review): immediately reassigned below; this allocation is unused
    Mat imgOUTMat = new Mat();
    // Threshold in HSV space using the tunable min/max H, S, V fields.
    CvInvoke.CvtColor(imgBGRMat, imgOUTMat, ColorConversion.Bgr2Hsv);
    Image<Hsv, byte> imgOUTBin = imgOUTMat.ToImage<Hsv, byte>();
    thresoldOUTFilter = imgOUTBin.InRange(new Hsv(minValueH, minValueS, minValueV), new Hsv(maxValueH, maxValueS, maxValueV)).Mat;
    // 3x3 cross kernel for the morphological opening below.
    int operationSize = 1;
    Mat structuringElement = CvInvoke.GetStructuringElement(ElementShape.Cross, new Size(2 * operationSize + 1, 2 * operationSize + 1), new Point(operationSize, operationSize));
    CvInvoke.Erode(thresoldOUTFilter, thresoldOUTFilter, structuringElement, new Point(-1, -1), 1, BorderType.Constant, new MCvScalar(0)); // Erode -> Dilate <=> Opening (removes small noise blobs)
    CvInvoke.Dilate(thresoldOUTFilter, thresoldOUTFilter, structuringElement, new Point(-1, -1), 1, BorderType.Constant, new MCvScalar(0));
    VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint();
    VectorOfPoint biggestContour = new VectorOfPoint();
    int biggestContourIndex = -1;
    double biggestContourArea = 0;
    Mat hierarchy = new Mat();
    CvInvoke.FindContours(thresoldOUTFilter, contours, hierarchy, RetrType.List, ChainApproxMethod.ChainApproxNone); // Find Contour using Binary filter
    // Linear scan for the contour with the largest area.
    for (int i = 0; i < contours.Size; i++)
    {
        double a = CvInvoke.ContourArea(contours[i], false);
        if (a > biggestContourArea)
        {
            biggestContourArea = a;
            biggestContourIndex = i;
            biggestContour = contours[i];
        }
    }
    if (biggestContourIndex != -1 && biggestContour.Size > 0)
    {
        // Determine Bounding Rectangle and setting its related values
        RotatedRect boundRec = CvInvoke.MinAreaRect(biggestContour);
        PointF[] boundRecPoints = boundRec.GetVertices();
        // Draw Bounding Rectangle
        DrawPointsFRectangle(boundRecPoints, imgBGRMat);
    }
    // Publish the annotated frame to the Unity texture (reuses the existing texture object).
    texture = Utils.ConvertMatToTex2D(imgBGRMat, texture, webcam_Capture_WIDTH, webcam_Capture_HEIGHT);
}
/// <summary>
/// Draws the outline of a rotated rectangle onto the annotated image and records
/// the rectangle in the raw-data list.
/// </summary>
/// <param name="rect">The rotated rectangle to draw.</param>
/// <param name="annotatedImage">The image that receives the rectangle outline.</param>
/// <param name="data">The raw data collection the rectangle is appended to.</param>
private void SetRect(RotatedRect rect, ref Image<Bgr, byte> annotatedImage, ref List<object> data)
{
    // Round the four float vertices to integer pixel coordinates.
    Point[] corners = Array.ConvertAll(rect.GetVertices(), Point.Round);
    Bgr penColor = new Bgr(_annoColor.Color());
    annotatedImage.DrawPolyline(corners, true, penColor, _lineThick);
    data.Add(new RotatedBox(rect));
}
/// <summary>
/// Finds the contour with the largest area and returns the axis-aligned rectangle
/// that encloses its minimum-area rotated rectangle.
/// Returns a 1x1 placeholder rectangle when no usable contour exists.
/// </summary>
/// <param name="contours">Contours detected on the image.</param>
private static Rectangle _FindLargestRectFromContours(VectorOfVectorOfPoint contours)
{
    // Guard explicitly instead of relying on the catch below: with zero contours the
    // original indexed contours[0] and depended on the exception for its fallback.
    if (contours == null || contours.Size == 0)
    {
        return new Rectangle(0, 0, 1, 1);
    }
    // OBTAIN THE BIGGEST CONTOUR
    double largestArea = 0;
    int largestAreaIndex = 0;
    for (int i = 0; i < contours.Size; i++)
    {
        double A = CvInvoke.ContourArea(contours[i]);
        if (A > largestArea)
        {
            largestArea = A;
            largestAreaIndex = i;
        }
    }
    try
    {
        RotatedRect r_rect = CvInvoke.MinAreaRect(contours[largestAreaIndex]);
        PointF[] vertices = r_rect.GetVertices();
        // Compute the bounding envelope from the vertex extremes rather than assuming a
        // fixed vertex order: the original's fixed indices (v[1].X..v[3].X etc.) produced
        // negative widths/heights for some orientations of the rotated rectangle.
        float minX = float.MaxValue, minY = float.MaxValue;
        float maxX = float.MinValue, maxY = float.MinValue;
        foreach (PointF v in vertices)
        {
            if (v.X < minX) { minX = v.X; }
            if (v.X > maxX) { maxX = v.X; }
            if (v.Y < minY) { minY = v.Y; }
            if (v.Y > maxY) { maxY = v.Y; }
        }
        return new Rectangle((int)minX, (int)minY, (int)(maxX - minX), (int)(maxY - minY));
    }
    catch
    {
        // Defensive fallback kept from the original implementation.
        return new Rectangle(0, 0, 1, 1);
    }
}
/// <summary>
/// Numbers the four vertices of a rotated rectangle counter-clockwise:
/// [0] = left-most (min X), [1] = bottom-most (max Y),
/// [2] = right-most (max X), [3] = top-most (min Y).
/// </summary>
/// <param name="rect">The rotated rectangle whose vertices are to be ordered.</param>
/// <returns>The four vertices in counter-clockwise extreme order.</returns>
public static PointF[] RectCode(RotatedRect rect)
{
    PointF[] p = rect.GetVertices();
    PointF[] pointfs = new PointF[4];
    pointfs[0] = p[0];
    pointfs[1] = p[0];
    pointfs[2] = p[0];
    pointfs[3] = p[0];
    // Counter-clockwise numbering.
    // BUGFIX: compare each vertex against the current extreme (pointfs[k]) instead of
    // the previous vertex (p[i - 1]); the original comparison made the result depend on
    // the order GetVertices() happened to return, not on actual vertex positions.
    for (int i = 1; i < 4; i++)
    {
        if (p[i].X < pointfs[0].X)
        {
            pointfs[0] = p[i];
        }
        if (p[i].Y > pointfs[1].Y)
        {
            pointfs[1] = p[i];
        }
        if (p[i].X > pointfs[2].X)
        {
            pointfs[2] = p[i];
        }
        if (p[i].Y < pointfs[3].Y)
        {
            pointfs[3] = p[i];
        }
    }
    return pointfs;
}
/// <summary>
/// Runs Canny edge detection and contour extraction on a filtered grayscale frame and
/// returns the frame's raw pixel buffer, the four vertices of the largest contour's
/// minimum-area rectangle, and the largest contour's points.
/// NOTE(review): the tuple element is named "centroids" but it actually holds the
/// rectangle's vertices — confirm against callers before renaming.
/// </summary>
/// <param name="FilteredImage">Pre-filtered grayscale frame to analyse.</param>
private (byte[] ProcessedPixels, System.Drawing.PointF[] centroids, System.Drawing.Point[]) ProceessImage(Image<Gray, byte> FilteredImage)
{
    UMat FrameCannyImage = new UMat();
    CvInvoke.Canny(FilteredImage, FrameCannyImage, 100, 200);
    VectorOfVectorOfPoint FrameImageContours = new VectorOfVectorOfPoint();
    CvInvoke.FindContours(FrameCannyImage, FrameImageContours, null, RetrType.External, ChainApproxMethod.ChainApproxSimple);
    // Seed with a tiny placeholder contour (area 0) so the loop can simply keep the
    // largest-area contour seen so far.
    VectorOfPoint FrameAppContour = new VectorOfPoint(2);
    for (int k = 0; k < FrameImageContours.Size; k++)
    {
        VectorOfPoint contour = FrameImageContours[k];
        if (CvInvoke.ContourArea(contour) > CvInvoke.ContourArea(FrameAppContour))
        {
            FrameAppContour = contour;
        }
    }
    // (Removed an unused 512x424 Image<Gray, byte> allocation that was never read.)
    RotatedRect FrameRotatedRect = CvInvoke.MinAreaRect(FrameAppContour);
    System.Drawing.PointF[] FrameInitCenters = FrameRotatedRect.GetVertices();
    System.Drawing.Point[] contours = FrameAppContour.ToArray();
    byte[] NonZeroPixels = FilteredImage.Bytes;
    // Reuse the materialized array instead of calling ToArray() a second time.
    return (NonZeroPixels, FrameInitCenters, contours);
}
/// <summary>
/// Button handler: finds external contours on the pre-thresholded image (imgout) and,
/// for each contour larger than the sensitivity threshold, paints a small marker at the
/// center of its minimum-area rectangle directly onto the PictureBox surface.
/// NOTE(review): pictureBox1.CreateGraphics() is called on every loop iteration and the
/// Graphics object is never disposed — presumably a GDI handle leak; confirm and hoist
/// a single using(...) outside the loop.
/// </summary>
private void btnCentro_Click(object sender, EventArgs e)
{
    Emgu.CV.Util.VectorOfVectorOfPoint contours = new Emgu.CV.Util.VectorOfVectorOfPoint();
    Mat mat = new Mat();
    CvInvoke.FindContours(imgout, contours, mat, Emgu.CV.CvEnum.RetrType.External, Emgu.CV.CvEnum.ChainApproxMethod.ChainApproxSimple);
    for (int i = 0; i < contours.Size; i++)
    {
        var area = CvInvoke.ContourArea(contours[i]);
        // numericSenCount acts as the minimum-area sensitivity threshold.
        if (area > (int)numericSenCount.Value)
        {
            Rectangle rect = CvInvoke.BoundingRectangle(contours[i]);
            RotatedRect box = CvInvoke.MinAreaRect(contours[i]);
            PointF[] Vertices = box.GetVertices();
            PointF point = box.Center;
            // Draw a 5x5 marker at the rotated rectangle's center.
            papel = pictureBox1.CreateGraphics();
            pluma.Width = 5;
            pluma.Color = Color.DarkBlue;
            papel.DrawRectangle(pluma, point.X, point.Y, 5, 5);
        }
    }
    // Commented-out angle computation kept from the original for reference.
    /*PointF edge1 = new PointF(Vertices[1].X - Vertices[0].X,
     * Vertices[1].Y - Vertices[0].Y);
     * PointF edge2 = new PointF(Vertices[2].X - Vertices[1].X, Vertices[2].Y - Vertices[1].Y);
     * double edge1Magnitude = Math.Sqrt(Math.Pow(edge1.X, 2) + Math.Pow(edge1.Y, 2));
     * double edge2Magnitude = Math.Sqrt(Math.Pow(edge2.X, 2) + Math.Pow(edge2.Y, 2));
     * PointF primaryEdge = edge1Magnitude > edge2Magnitude ? edge1 : edge2;
     * PointF reference = new PointF(Vertices[1].X, Vertices[0].Y);
     * double thetaRads = Math.Acos(Math.Sqrt(Math.Pow((primaryEdge.X * reference.X),2) + Math.Pow((primaryEdge.Y * reference.Y),2))/ (Math.Sqrt(Math.Pow(primaryEdge.X,2)+Math.Pow(reference.X,2))* Math.Sqrt(Math.Pow(primaryEdge.Y, 2) + Math.Pow(reference.Y, 2))));
     * double thetaDeg = thetaRads * 180 / Math.PI;
     * MessageBox.Show(thetaDeg.ToString());*/
}
/// <summary>
/// Draws a rotated rectangle onto the image: a red outline connecting consecutive
/// vertices plus a blue filled circle on each vertex.
/// </summary>
/// <param name="rrec">The rotated rectangle to draw.</param>
/// <param name="img">The image drawn onto.</param>
public static void DrawRotatedRect(RotatedRect rrec, Image<Bgr, byte> img)
{
    PointF[] pointfs = rrec.GetVertices();
    for (int j = 0; j < pointfs.Length; j++)
    {
        // Wrap with pointfs.Length (not a hard-coded 4) so the closing edge index can
        // never run past the array; GetVertices() currently returns 4 vertices, so
        // behavior is unchanged.
        CvInvoke.Line(img,
                      new Point((int)pointfs[j].X, (int)pointfs[j].Y),
                      new Point((int)pointfs[(j + 1) % pointfs.Length].X, (int)pointfs[(j + 1) % pointfs.Length].Y),
                      new MCvScalar(0, 0, 255, 255), 4);
        img.Draw(new CircleF(pointfs[j], 5), new Bgr(Color.Blue), 5);
    }
}
/// <summary>
/// Side-camera pipeline #2: thresholds OriginalImage in HSV using the trackbar limits,
/// median-filters the mask, runs Canny + contour detection, keeps the contour whose
/// min-area rectangle has the largest area, draws that rectangle and the height
/// threshold line, and stores the result in SideRect_2.
/// </summary>
private void ProcessImage_Side2()
{
    Image<Hsv, byte> HSVImage;
    Image<Gray, byte> HSVMaskedImage;
    Image<Gray, Byte> FilteredImage;
    Image<Gray, byte> CannyImage;
    Image<Bgr, byte> DrawBound;
    VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint();
    RotatedRect BoundingBox;
    int Rectcount;
    float MaxArea = 0;
    RotatedRect MaxRect = new RotatedRect();
    // HSV threshold driven by the six UI trackbars.
    HSVImage = OriginalImage.Convert<Hsv, byte>();
    Hsv lowerLimit = new Hsv(trackBar_H_Low_3.Value, trackBar_S_Low_3.Value, trackBar_V_Low_3.Value); //50 80
    Hsv upperLimit = new Hsv(trackBar_H_Up_3.Value, trackBar_S_Up_3.Value, trackBar_V_Up_3.Value); //50 80
    HSVMaskedImage = HSVImage.InRange(lowerLimit, upperLimit);
    imageBox_HSV_3.Image = HSVMaskedImage;
    // Large (25px) median blur to suppress speckle noise in the mask.
    FilteredImage = HSVMaskedImage.SmoothMedian(25);
    // Preview either the filtered or the raw mask depending on the checkbox.
    if (checkBox_Filter_3.Checked)
    {
        imageBox_HSV_3.Image = FilteredImage;
    }
    else
    {
        imageBox_HSV_3.Image = HSVMaskedImage;
    }
    CannyImage = FilteredImage.Clone();
    CvInvoke.Canny(FilteredImage, CannyImage, 255, 255, 5, true);
    DrawBound = OriginalImage.Clone();
    CvInvoke.FindContours(CannyImage, contours, null, RetrType.External, ChainApproxMethod.ChainApproxSimple);
    Rectcount = contours.Size;
    for (int i = 0; i < Rectcount; i++)
    {
        using (VectorOfPoint contour = contours[i])
        {
            // Min-area rotated rectangle for this contour; keep the largest by area.
            BoundingBox = CvInvoke.MinAreaRect(contour);
            if ((BoundingBox.Size.Width * BoundingBox.Size.Height) > MaxArea)
            {
                MaxArea = BoundingBox.Size.Width * BoundingBox.Size.Height;
                MaxRect = BoundingBox;
            }
        }
    }
    // Draw the winning rectangle (red) and the horizontal height-threshold line (pink).
    CvInvoke.Polylines(DrawBound, Array.ConvertAll(MaxRect.GetVertices(), Point.Round), true, new Bgr(Color.Red).MCvScalar, 3);
    CvInvoke.Line(DrawBound, new Point(0, HeightThreshold), new Point(OriginalImage.Width, HeightThreshold), new Bgr(Color.DeepPink).MCvScalar, 5);
    imageBox_Result_3.Image = DrawBound;
    SideRect_2 = MaxRect;
}
/// <summary>
/// Deskews the region covered by a contour: computes its min-area rotated rectangle,
/// warps the region upright via an affine transform, scales it to approximately the
/// configured size, and trims a 2px border before returning it as an Rgb image.
/// </summary>
/// <param name="image">The source image the contour lies on.</param>
/// <param name="contour">The contour delimiting the region of interest.</param>
/// <returns>The upright, resized, edge-trimmed region.</returns>
private Image<Rgb, byte> RotateContour(Image<Rgb, byte> image, VectorOfPoint contour)
{
    int edgePixelSize = 2;
    RotatedRect box = CvInvoke.MinAreaRect(contour);
    // Normalize the angle into [-45, 45] by swapping width/height, so the warp below
    // always rotates by the smaller amount.
    if (box.Angle < -45.0)
    {
        float tmp = box.Size.Width;
        box.Size.Width = box.Size.Height;
        box.Size.Height = tmp;
        box.Angle += 90.0f;
    }
    else if (box.Angle > 45.0)
    {
        float tmp = box.Size.Width;
        box.Size.Width = box.Size.Height;
        box.Size.Height = tmp;
        box.Angle -= 90.0f;
    }
    using (UMat rotatedMat = new UMat())
    using (UMat resizedMat = new UMat())
    {
        // Map three of the box corners onto an axis-aligned rectangle of the same size.
        PointF[] srcCorners = box.GetVertices();
        PointF[] destCorners = new PointF[]
        {
            new PointF(0, box.Size.Height - 1),
            new PointF(0, 0),
            new PointF(box.Size.Width - 1, 0),
            new PointF(box.Size.Width - 1, box.Size.Height - 1)
        };
        using (Mat rot = CvInvoke.GetAffineTransform(srcCorners, destCorners))
            CvInvoke.WarpAffine(image, rotatedMat, rot, Size.Round(box.Size));
        // Scale uniformly so the result fits inside the configured target size.
        Size approxSize = new Size(Settings.ResizeWidth, Settings.ResizeHeight);
        double scale = Math.Min(approxSize.Width / box.Size.Width, approxSize.Height / box.Size.Height);
        Size newSize = new Size((int)Math.Round(box.Size.Width * scale), (int)Math.Round(box.Size.Height * scale));
        CvInvoke.Resize(rotatedMat, resizedMat, newSize, 0, 0, Inter.Cubic);
        // Trim edgePixelSize pixels from every side to drop warp artifacts at the border.
        Rectangle newRoi = new Rectangle(new Point(edgePixelSize, edgePixelSize), resizedMat.Size - new Size(2 * edgePixelSize, 2 * edgePixelSize));
        UMat plate = new UMat(resizedMat, newRoi);
        return plate.ToImage<Rgb, byte>();
    }
}
/// <summary>
/// Feeds the rectangle's vertices (in Kinect depth-image coordinates) into the
/// homography calibration. Does nothing unless listening is active.
/// </summary>
private void updatedRect(Transform t, RotatedRect r)
{
    if (isListening)
    {
        PointF[] kinectPoints = r.GetVertices();
        calibrateHomographyMaths(kinectPoints, DepthSourceManager.Width(), DepthSourceManager.Height());
    }
}
/// <summary>
/// Annotates a detected contour on the frame: draws its axis-aligned bounding box
/// (green), its min-area rotated rectangle translated so its center coincides with the
/// bounding-box center (red), a center dot, and the center coordinates as text.
/// Returns the bounding-box center.
/// </summary>
/// <param name="frame">The frame to draw annotations on.</param>
/// <param name="contour">The detected contour.</param>
/// <returns>The center of the contour's axis-aligned bounding box.</returns>
private static System.Drawing.Point MarkDetectedObject(Mat frame, VectorOfPoint contour)
{
    // Convert integer contour points to PointF for MinAreaRect.
    // NOTE(review): Parallel.ForEach for a simple element-wise copy is heavier than a
    // plain loop; presumably kept from profiling larger contours — confirm.
    System.Drawing.Point[] points = contour.ToArray();
    PointF[] pointsF = new PointF[points.Length];
    int[] nums = Enumerable.Range(0, points.Length).ToArray();
    Parallel.ForEach(nums, i => { pointsF[i] = new PointF(points[i].X, points[i].Y); });
    RotatedRect rotatedRectangle = CvInvoke.MinAreaRect(pointsF);
    Rectangle box = CvInvoke.BoundingRectangle(contour);
    System.Drawing.Point center = new System.Drawing.Point(box.X + box.Width / 2, box.Y + box.Height / 2);
    // Shift that moves the rotated rectangle's center onto the bounding-box center.
    System.Drawing.Point rotatedCenter = new System.Drawing.Point(Convert.ToInt32(rotatedRectangle.Center.X), Convert.ToInt32(rotatedRectangle.Center.Y));
    System.Drawing.Point shift = new System.Drawing.Point(center.X - rotatedCenter.X, center.Y - rotatedCenter.Y);
    PointF[] verticesF = rotatedRectangle.GetVertices();
    System.Drawing.Point[] vertices = new System.Drawing.Point[4];
    vertices[0] = new System.Drawing.Point(Convert.ToInt32(verticesF[0].X) + shift.X, Convert.ToInt32(verticesF[0].Y) + shift.Y);
    vertices[1] = new System.Drawing.Point(Convert.ToInt32(verticesF[1].X) + shift.X, Convert.ToInt32(verticesF[1].Y) + shift.Y);
    vertices[2] = new System.Drawing.Point(Convert.ToInt32(verticesF[2].X) + shift.X, Convert.ToInt32(verticesF[2].Y) + shift.Y);
    vertices[3] = new System.Drawing.Point(Convert.ToInt32(verticesF[3].X) + shift.X, Convert.ToInt32(verticesF[3].Y) + shift.Y);
    // Line thickness scales with the result image size; clamp to at least 1.
    double fontCoeficient = 0.00109d;
    double fontScale = (resultMatImage.Cols + resultMatImage.Rows) * 0.5d * fontCoeficient;
    int lineThickness = Convert.ToInt32(fontScale * 3d);
    if (lineThickness <= 0)
    {
        lineThickness = 1;
    }
    MCvScalar redColor = new Bgr(Color.Red).MCvScalar;
    CvInvoke.Rectangle(frame, box, new Bgr(Color.Green).MCvScalar, lineThickness);
    // Draw the shifted rotated rectangle edge by edge.
    CvInvoke.Line(frame, vertices[0], vertices[1], redColor, lineThickness);
    CvInvoke.Line(frame, vertices[1], vertices[2], redColor, lineThickness);
    CvInvoke.Line(frame, vertices[2], vertices[3], redColor, lineThickness);
    CvInvoke.Line(frame, vertices[3], vertices[0], redColor, lineThickness);
    CvInvoke.Circle(frame, center, lineThickness * 3, redColor, -1);
    // Print "x, y" next to the bounding box.
    string positionString = center.X.ToString().Replace(',', '.') + ", " + center.Y.ToString().Replace(',', '.');
    WriteMultilineText(frame, positionString, new System.Drawing.Point(box.Right + 5, center.Y));
    return (center);
}
/// <summary>
/// Builds a ModifiedRotatedRect from a RotatedRect by classifying its four vertices
/// into an upper pair and a lower pair (sorted by Y), then delegating final point
/// designation to DesignatePoints().
/// </summary>
/// <param name="rect">Source rotated rectangle; must expose exactly four vertices.</param>
/// <exception cref="ArgumentException">Thrown when the rectangle does not have four vertices.</exception>
public ModifiedRotatedRect(RotatedRect rect)
{
    // Call GetVertices() once and reuse the result (the original called it twice).
    List<PointF> lv = rect.GetVertices().ToList();
    if (lv.Count == 4)
    {
        // sort the by Y axis from lowest to highest
        lv.Sort((a, b) => (a.Y).CompareTo(b.Y));
        UpperMost = new PointPair();
        LowerMost = new PointPair();
        UpperMost.pt1 = new Point((int)lv[0].X, (int)lv[0].Y);
        UpperMost.pt2 = new Point((int)lv[1].X, (int)lv[1].Y);
        LowerMost.pt1 = new Point((int)lv[2].X, (int)lv[2].Y);
        LowerMost.pt2 = new Point((int)lv[3].X, (int)lv[3].Y);
        DesignatePoints();
    }
    else
    {
        // ArgumentException is more specific than the bare Exception the original threw;
        // callers catching Exception still work since it is a subclass.
        throw new ArgumentException("Vertices must be equal to four", nameof(rect));
    }
}
/// <summary>
/// Extracts each rotated-rectangle region of interest from the image: deskews it with
/// an affine warp, scales it to fit inside 600x600, applies an inverse binary
/// threshold, and returns the resulting binary Mats.
/// </summary>
/// <param name="rect">Rotated rectangles delimiting the regions of interest.</param>
/// <param name="img">Source image to crop from.</param>
/// <returns>One thresholded, deskewed Mat per input rectangle.</returns>
private List<Mat> RoI(List<RotatedRect> rect, Mat img)
{
    List<Mat> mat = new List<Mat>();
    foreach (var rr in rect)
    {
        RotatedRect box = rr;
        // Normalize the angle into [-45, 45] by swapping width/height so the warp
        // rotates by the smaller amount.
        if (box.Angle < -45.0)
        {
            float tmp = box.Size.Width;
            box.Size.Width = box.Size.Height;
            box.Size.Height = tmp;
            box.Angle += 90.0f;
        }
        else if (box.Angle > 45.0)
        {
            float tmp = box.Size.Width;
            box.Size.Width = box.Size.Height;
            box.Size.Height = tmp;
            box.Angle -= 90.0f;
        }
        // Aspect-ratio filter kept disabled from the original implementation.
        //double whRatio = (double)box.Size.Width / box.Size.Height;
        //if (!(2.0 < whRatio && whRatio < 8.0))
        //{
        using (Mat thresh = new Mat())
        using (Mat tmp1 = new Mat())
        using (Mat tmp2 = new Mat())
        {
            // Map the box corners onto an upright rectangle of the same size.
            PointF[] srcCorners = box.GetVertices();
            PointF[] destCorners = new PointF[]
            {
                new PointF(0, box.Size.Height - 1),
                new PointF(0, 0),
                new PointF(box.Size.Width - 1, 0),
                new PointF(box.Size.Width - 1, box.Size.Height - 1)
            };
            using (Mat rot = CvInvoke.GetAffineTransform(srcCorners, destCorners))
            {
                CvInvoke.WarpAffine(img, tmp1, rot, Size.Round(box.Size));
            }
            // Uniform scale so the ROI fits inside 600x600.
            Size approxSize = new Size(600, 600);
            double scale = Math.Min(approxSize.Width / box.Size.Width, approxSize.Height / box.Size.Height);
            Size newSize = new Size((int)Math.Round(box.Size.Width * scale), (int)Math.Round(box.Size.Height * scale));
            CvInvoke.Resize(tmp1, tmp2, newSize, 0, 0, Inter.Cubic);
            // Inverse binary threshold: dark content becomes white on black.
            CvInvoke.Threshold(tmp2, thresh, 100, 255, ThresholdType.BinaryInv);
            mat.Add(thresh.Clone());
        }
        //}
    }
    return (mat);
}
/// <summary>
/// Regularizes every picked opening polygon into its minimum-area rotated rectangle,
/// writes the four corners back into PicOpenings, rebuilds the Openings list from the
/// regularized points, and rescales them.
/// NOTE(review): the loop bound is Openings.Count but the body indexes PicOpenings[i] —
/// this assumes the two lists are always the same length; confirm, otherwise this can
/// throw or silently skip entries.
/// </summary>
public void ProcessOpenings()
{
    for (int i = 0; i < Openings.Count; i++)
    {
        //InputArray pts = InputArray.Create(Openings[i].Select(p => new Point(p.X, p.Y)));
        //RotatedRect rect = Cv2.MinAreaRect(pts);
        // Fit a min-area rotated rectangle to the picked points and replace the raw
        // points with its four corners.
        RotatedRect rect = CvInvoke.MinAreaRect(PicOpenings[i].Select(p => new PointF((float)p.X, (float)p.Y)).ToArray());
        var corners = rect.GetVertices().Select(p => new MWPoint2D(p.X, p.Y)).ToList();
        PicOpenings[i] = corners;
    }
    Openings = PicOpenings.Select(op => new Opening() { RawPoints = op.Select(p => new MWPoint2D(p.X, p.Y)).ToList() }).ToList();
    RescaleOpenings();
}
/// <summary>
/// Button handler: for every external contour on imgout, computes the angle between
/// the longer edge of its min-area rotated rectangle and the horizontal axis, and
/// prints that angle above the contour's bounding box on img.
/// NOTE(review): Math.Acos(proPunto / modAB * modHor) parses as (proPunto / modAB) *
/// modHor — correct only because modHor == 1.0; parenthesize if modHor ever changes.
/// NOTE(review): the dot product uses only the X components (vecAB.X * vecHor.X),
/// which is valid solely because vecHor = (1, 0).
/// </summary>
private void button2_Click(object sender, EventArgs e)
{
    Emgu.CV.Util.VectorOfVectorOfPoint contours = new Emgu.CV.Util.VectorOfVectorOfPoint();
    Mat mat = new Mat();
    CvInvoke.FindContours(imgout, contours, mat, Emgu.CV.CvEnum.RetrType.External, Emgu.CV.CvEnum.ChainApproxMethod.ChainApproxSimple);
    for (int i = 0; i < contours.Size; i++)
    {
        Rectangle rect = CvInvoke.BoundingRectangle(contours[i]);
        RotatedRect box = CvInvoke.MinAreaRect(contours[i]);
        PointF[] Vertices = box.GetVertices();
        //MessageBox.Show(Vertices[0].X.ToString() + "," + Vertices[0].Y.ToString() + " " + Vertices[1].X.ToString() + "," + Vertices[1].Y.ToString());
        //MessageBox.Show(Vertices[2].X.ToString() + "," + Vertices[2].Y.ToString() + " " + Vertices[3].X.ToString() + "," + Vertices[3].Y.ToString());
        // Edge vectors AB and BC of the rotated rectangle.
        PointF vecAB = new PointF((Vertices[1].X) - (Vertices[0].X), (Vertices[1].Y) - (Vertices[0].Y));
        PointF vecBC = new PointF((Vertices[2].X) - (Vertices[1].X), (Vertices[2].Y) - (Vertices[1].Y));
        // Magnitudes of AB and BC.
        double modAB = Math.Sqrt(Math.Pow(vecAB.X, 2) + Math.Pow(vecAB.Y, 2));
        double modBC = Math.Sqrt(Math.Pow(vecBC.X, 2) + Math.Pow(vecBC.Y, 2));
        // Keep the longer edge so the angle is measured along the rectangle's major axis.
        if (modAB < modBC)
        {
            vecAB = vecBC;
            modAB = modBC;
        }
        //MessageBox.Show(vecAB.ToString());
        // Horizontal unit reference vector.
        PointF vecHor = new PointF(1, 0);
        //double modAB = Math.Sqrt(Math.Pow(vecAB.X, 2) + Math.Pow(vecAB.Y, 2));
        double modHor = 1.0;
        // Angle between the major edge and the horizontal, via the dot product.
        double proPunto = vecAB.X * vecHor.X;
        double anguloRad = Math.Acos(proPunto / modAB * modHor);
        double anguloDeg = (anguloRad * 180) / Math.PI;
        //MessageBox.Show(anguloDeg.ToString());
        // Print the angle just above the contour's bounding box.
        var blob = CvInvoke.BoundingRectangle(contours[i]);
        blob.Y -= 5;
        CvInvoke.PutText(img, "Angulo:" + anguloDeg.ToString(), blob.Location, Emgu.CV.CvEnum.FontFace.HersheySimplex, 0.5, new MCvScalar(255, 255, 255));
        //CvInvoke.PutText(imgout, "Angulo: " + anguloDeg.ToString(), blob.Location, Emgu.CV.CvEnum.FontFace.HersheyPlain, 1.0, new MCvScalar(100));
        pictureBox1.Image = img.ToBitmap();
        //pictureBox2.Image = imgout.ToBitmap();
    }
}
/// <summary>
/// Converts each picked column outline into a standardized column: fits a min-area
/// rotated rectangle, snaps its aspect ratio to the closest entry in the ColDims
/// catalogue, rebuilds the four corners (relative to the column center) from the
/// catalogue dimensions oriented along the rectangle's long edge, and appends the
/// result to Columns before rescaling.
/// </summary>
public void ProcessColumns()
{
    for (int i = 0; i < PicColumns?.Count; i++)
    {
        // Min-area rotated rectangle of the picked points.
        RotatedRect rect = CvInvoke.MinAreaRect(PicColumns[i].Select(p => new PointF((float)p.X, (float)p.Y)).ToArray());
        var corners = rect.GetVertices().Select(p => new MWPoint2D(p.X, p.Y)).ToList();
        // Lengths of the two adjacent edges; their ratio drives catalogue matching.
        double l1 = Points.Distance(corners[0], corners[1]);
        double l2 = Points.Distance(corners[1], corners[2]);
        List<MWPoint2D> newPoints = new List<MWPoint2D>();
        // Center = midpoint of the rectangle's diagonal.
        MWPoint2D center = new MWPoint2D(0.5 * (corners[0].X + corners[2].X), 0.5 * (corners[0].Y + corners[2].Y));
        ColumnDims cd;
        MWVector2D v;
        // Pick the catalogue entry whose ratio is closest, and orient v along the
        // rectangle's longer edge.
        if (l1 > l2)
        {
            double ratio = l1 / l2;
            cd = ColDims.Aggregate(ColDims[0], (closest, next) => Math.Abs(ratio - next.Ratio) < Math.Abs(ratio - closest.Ratio) ? next : closest);
            v = new MWVector2D(corners[1].X - corners[0].X, corners[1].Y - corners[0].Y);
        }
        else
        {
            double ratio = l2 / l1;
            cd = ColDims.Aggregate(ColDims[0], (closest, next) => Math.Abs(ratio - next.Ratio) < Math.Abs(ratio - closest.Ratio) ? next : closest);
            v = new MWVector2D(corners[2].X - corners[1].X, corners[2].Y - corners[1].Y);
        }
        // Catalogue dimensions are in mm (divide by 1e3); L = long side, W = short side.
        double L = Math.Max(cd.D1, cd.D2) / 1e3;
        double W = Math.Min(cd.D1, cd.D2) / 1e3;
        v = v.Normalize();
        // n is v rotated 90 degrees; the four corners are +/- L/2 along v and +/- W/2 along n.
        MWVector2D n = new MWVector2D(-v.Y, v.X);
        newPoints.Add(new MWPoint2D(+0.5 * L * v.X + 0.5 * W * n.X, +0.5 * L * v.Y + 0.5 * W * n.Y));
        newPoints.Add(new MWPoint2D(+0.5 * L * v.X - 0.5 * W * n.X, +0.5 * L * v.Y - 0.5 * W * n.Y));
        newPoints.Add(new MWPoint2D(-0.5 * L * v.X - 0.5 * W * n.X, -0.5 * L * v.Y - 0.5 * W * n.Y));
        newPoints.Add(new MWPoint2D(-0.5 * L * v.X + 0.5 * W * n.X, -0.5 * L * v.Y + 0.5 * W * n.Y));
        Columns.Add(new Column() { RawCenter = center, SectionPoints = newPoints });
    }
    RescaleColumns();
}
/// <summary>
/// Finds all external contours in the binary source image, draws each contour's
/// minimum-area rotated rectangle onto the target image in deep pink, and returns
/// the number of contours found.
/// </summary>
/// <param name="src">Binary source image to search for contours.</param>
/// <param name="draw">Color image the rectangles are drawn onto.</param>
/// <returns>The number of external contours detected.</returns>
public static int MinAreaBoundingBox(Image<Gray, byte> src, Image<Bgr, byte> draw)
{
    using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
    {
        CvInvoke.FindContours(src, contours, null, RetrType.External, ChainApproxMethod.ChainApproxSimple);
        int total = contours.Size;
        MCvScalar outlineColor = new Bgr(Color.DeepPink).MCvScalar;
        for (int idx = 0; idx < total; idx++)
        {
            using (VectorOfPoint contour = contours[idx])
            {
                // Minimum-area (rotated) rectangle for this contour.
                RotatedRect box = CvInvoke.MinAreaRect(contour);
                Point[] corners = Array.ConvertAll(box.GetVertices(), Point.Round);
                CvInvoke.Polylines(draw, corners, true, outlineColor, 3);
            }
        }
        return (total);
    }
}
/// <summary>
/// Crops the paper region described by a rotated rectangle out of the original image
/// and rotates the crop by +/-90 degrees when the rectangle's orientation indicates
/// the paper is on its side, so the result is upright.
/// </summary>
/// <param name="imgOriginal">Source image; must be an Image&lt;Bgr, byte&gt;.</param>
/// <param name="rotatedRect">Region of the paper; an empty size means "use the whole image".</param>
/// <returns>The (possibly rotated) paper crop, or null when imgOriginal is null.</returns>
/// <exception cref="ArgumentException">Thrown when imgOriginal is not an Image&lt;Bgr, byte&gt;.</exception>
public IImage ExtractPaperArea(IImage imgOriginal, RotatedRect rotatedRect)
{
    if (imgOriginal == null)
    {
        return (null);
    }
    Image<Bgr, byte> imgColor = imgOriginal as Image<Bgr, byte>;
    if (imgColor == null)
    {
        // ArgumentException is more specific than the bare Exception the original
        // threw; callers catching Exception still work.
        throw new ArgumentException("imgmat must be Image<Bgr, byte> object.", nameof(imgOriginal));
    }
    Image<Bgr, byte> imgReturn = imgColor;
    if (rotatedRect.Size != SizeF.Empty)
    {
        imgReturn = imgColor.Copy(rotatedRect);
    }
    // Use the two bottom-most vertices to decide which way the rectangle leans, and
    // upright the crop accordingly (white background fills the corners).
    List<PointF> _2YMaxPoint = rotatedRect.GetVertices().OrderByDescending(p => p.Y).Take(2).ToList();
    if (_2YMaxPoint[1].X > _2YMaxPoint[0].X)
    {
        if (rotatedRect.Angle > 0)
        {
            imgReturn = imgReturn.Rotate(90, new Bgr(255, 255, 255), false);
        }
    }
    else
    {
        if (rotatedRect.Angle < -45)
        {
            imgReturn = imgReturn.Rotate(-90, new Bgr(255, 255, 255), false);
        }
    }
    return (imgReturn);
}
/// <summary>
/// Frame-grabbed handler: locates the marker in the current frame, draws and labels its
/// four vertices, then estimates the distance to the marker from the length of the
/// vertex[2]-vertex[3] edge using the pinhole relation dis = f * knownWidth / pixelWidth.
/// NOTE(review): the closing edge (P4 to P1) is drawn without an explicit thickness
/// (defaults to 1) while the other edges use 2 — confirm whether that is intended.
/// </summary>
/// <param name="sender">Event source (the capture).</param>
/// <param name="e">Event arguments (unused).</param>
private void Capture_ImageGrabbed(object sender, EventArgs e)
{
    RotatedRect marker = find_marker(currentFrame.Convert<Gray, byte>());
    PointF[] points = marker.GetVertices();
    for (int i = 0; i < 4; i++)
    {
        // Green dot and "P{n}" label on each vertex.
        CvInvoke.Circle(currentFrame, new Point((int)points[i].X, (int)points[i].Y), 3, new MCvScalar(0, 255, 0, 100), 2);
        CvInvoke.PutText(currentFrame, "P" + (i + 1), new Point((int)points[i].X, (int)points[i].Y), Emgu.CV.CvEnum.FontFace.HersheyComplex, 1, new MCvScalar(0, 0, 255, 100), 1);
        if (i == 3)
        {
            // Close the polygon: last vertex back to the first.
            CvInvoke.Line(currentFrame, new Point((int)points[3].X, (int)points[3].Y), new Point((int)points[0].X, (int)points[0].Y), new MCvScalar(0, 255, 0, 100));
            break;
        }
        CvInvoke.Line(currentFrame, new Point((int)points[i].X, (int)points[i].Y), new Point((int)points[i + 1].X, (int)points[i + 1].Y), new MCvScalar(0, 255, 0, 100), 2);
    }
    // Pixel length of the P3-P4 edge, used as the apparent marker width.
    p34 = Math.Pow(Math.Pow((points[2].X - points[3].X), 2) + Math.Pow(points[2].Y - points[3].Y, 2), 0.5);
    double dis = f * knownWidth / p34;
    CvInvoke.PutText(currentFrame, "distance:" + dis.ToString(), new Point(50, 50), Emgu.CV.CvEnum.FontFace.HersheyComplex, 1, new MCvScalar(0, 0, 255, 100), 1);
    imageBox.Image = currentFrame;
}
/// <summary>
/// Returns two "tip" points of a contour's minimum-area rectangle:
/// [0] the vertex closest to the image origin, and
/// [1] the left-most vertex that is not that point.
/// Returns null when the contour is null.
/// </summary>
/// <param name="cnt">Contour to analyse.</param>
public static System.Drawing.Point[] Search2Tip(VectorOfPoint cnt)
{
    if (null == cnt)
    {
        return (null);
    }
    RotatedRect box = CvInvoke.MinAreaRect(cnt);
    System.Drawing.PointF[] cornersF = box.GetVertices();
    // Round the float vertices to integer pixels.
    System.Drawing.Point[] corners = new System.Drawing.Point[cornersF.Length];
    for (int idx = 0; idx < corners.Length; idx++)
    {
        corners[idx] = System.Drawing.Point.Round(cornersF[idx]);
    }
    // Vertex with the smallest squared distance to (0, 0).
    System.Drawing.Point nearestOrigin =
        corners.OrderBy(pt => (Math.Pow(pt.X, 2) + Math.Pow(pt.Y, 2))).First();
    // Left-most vertex that differs from the one already chosen; defaults to (0, 0)
    // when every vertex coincides with it.
    System.Drawing.Point leftMost =
        corners.OrderBy(pt => pt.X).FirstOrDefault(pt => pt != nearestOrigin);
    return (new System.Drawing.Point[] { nearestOrigin, leftMost });
}
/// <summary>
/// Locates the ID-card number region: scans the contours of the binarized image and
/// returns the first min-area rotated rectangle whose width/height ratio lies in
/// (6, 10) with height above 20px (the shape of the ID-number strip), drawing its
/// outline on a scratch image along the way. Returns a default RotatedRect when no
/// candidate matches.
/// </summary>
/// <param name="img">Color input image of the ID card.</param>
/// <returns>The rotated rectangle of the ID-number region, or default when not found.</returns>
public static RotatedRect IdRotatedRect(Image<Bgr, byte> img)
{
    // Scratch image the candidate outline is drawn onto (never displayed here).
    Image<Bgr, byte> a = new Image<Bgr, byte>(img.Size);
    VectorOfVectorOfPoint con = GetContours(BinImg(img));
    Point[][] con1 = con.ToArrayOfArray();
    PointF[][] con2 = Array.ConvertAll<Point[], PointF[]>(con1, new Converter<Point[], PointF[]>(PointToPointF));
    for (int i = 0; i < con.Size; i++)
    {
        RotatedRect rrec = CvInvoke.MinAreaRect(con2[i]);
        float w = rrec.Size.Width;
        float h = rrec.Size.Height;
        // The ID-number strip is long and thin: aspect ratio between 6 and 10,
        // and tall enough to exclude noise.
        if (w / h > 6 && w / h < 10 && h > 20)
        {
            PointF[] pointfs = rrec.GetVertices();
            for (int j = 0; j < pointfs.Length; j++)
            {
                CvInvoke.Line(a, new Point((int)pointfs[j].X, (int)pointfs[j].Y), new Point((int)pointfs[(j + 1) % 4].X, (int)pointfs[(j + 1) % 4].Y), new MCvScalar(0, 0, 255, 255), 4);
            }
            return (rrec);
        }
    }
    return (new RotatedRect());
}
/// <summary>
/// Prepares a candidate license-plate region for OCR: uprights the rotated box, warps
/// it out of the grayscale image, resizes it to roughly 240x180, trims a 2px border and
/// filters the plate. The OCR step itself is currently disabled, so <paramref name="text"/>
/// is always null and the method returns true.
/// </summary>
/// <param name="gray">Grayscale source image.</param>
/// <param name="box">Candidate plate region (min-area rotated rectangle).</param>
/// <param name="text">Recognized plate text; always null while OCR is disabled.</param>
/// <returns>True (parsing is assumed successful while OCR is disabled).</returns>
public bool TryParseBox(IInputArray gray, RotatedRect box, out string text)
{
    text = null;
    // Normalize the angle into [-45, 45] by swapping width/height so the warp rotates
    // by the smaller amount.
    if (box.Angle < -45.0)
    {
        float tmp = box.Size.Width;
        box.Size.Width = box.Size.Height;
        box.Size.Height = tmp;
        // BUGFIX: was "box.Angle = 90.0f;" — must ADD 90 like the mirrored branch below
        // (and like every sibling implementation of this normalization in this codebase),
        // otherwise the original tilt is discarded and the warp deskews incorrectly.
        box.Angle += 90.0f;
    }
    else if (box.Angle > 45.0)
    {
        float tmp = box.Size.Width;
        box.Size.Width = box.Size.Height;
        box.Size.Height = tmp;
        box.Angle -= 90.0f;
    }
    /*
     * double whRatio = (double)box.Size.Width / box.Size.Height;
     *
     * if (!(3.0 < whRatio && whRatio < 10.0))
     * {
     *     return false;
     * }
     */
    using (UMat tmp1 = new UMat())
    {
        using (UMat tmp2 = new UMat())
        {
            // Map the box corners onto an upright rectangle of the same size.
            PointF[] srcCorners = box.GetVertices();
            PointF[] destCorners = new PointF[]
            {
                new PointF(0, box.Size.Height - 1),
                new PointF(0, 0),
                new PointF(box.Size.Width - 1, 0),
                new PointF(box.Size.Width - 1, box.Size.Height - 1)
            };
            using (Mat rot = CameraCalibration.GetAffineTransform(srcCorners, destCorners))
            {
                CvInvoke.WarpAffine(gray, tmp1, rot, Size.Round(box.Size));
            }
            // resize the license plate such that the front is ~ 10-12. This size of front results in better accuracy from tesseract
            Size approxSize = new Size(240, 180);
            double scale = Math.Min(approxSize.Width / box.Size.Width, approxSize.Height / box.Size.Height);
            Size newSize = new Size((int)Math.Round(box.Size.Width * scale), (int)Math.Round(box.Size.Height * scale));
            CvInvoke.Resize(tmp1, tmp2, newSize, 0, 0, Inter.Cubic);
            // removes some pixels from the edge
            int edgePixelSize = 2;
            Rectangle newRoi = new Rectangle(new Point(edgePixelSize, edgePixelSize), tmp2.Size - new Size(2 * edgePixelSize, 2 * edgePixelSize));
            UMat plate = new UMat(tmp2, newRoi);
            // Filtered plate is produced but not consumed while the tesseract block
            // below remains disabled.
            UMat filteredPlate = FilterPlate(plate);
            /*
             * Tesseract.Character[] words;
             * StringBuilder strBuilder = new StringBuilder();
             * using (UMat tmp = filteredPlate.Clone())
             * {
             *     //_ocr.Recognize(tmp);
             *     // words = _ocr.GetCharacters();
             *     if (words.Length == 0) { return true; }
             *     for (int i = 0; i < words.Length; i++) { strBuilder.Append(words[i].Text); }
             * }
             * text = strBuilder.ToString();
             */
        }
    }
    return (true);
}
/// <summary>
/// Recursively walks the contour hierarchy looking for license-plate candidates:
/// a contour qualifies when it is large enough (area > 400), contains at least 3
/// child contours (assumed characters) and its min-area box has a width/height ratio
/// in (3, 10). Each qualifying region is deskewed, resized, border-trimmed, filtered
/// and OCR'd; results are appended to the four output lists.
/// </summary>
/// <param name="contours">All contours found on the image.</param>
/// <param name="hierachy">Contour hierarchy ([idx,0] = next sibling, [idx,2] = first child).</param>
/// <param name="idx">Index of the first contour of the sibling chain to examine.</param>
/// <param name="gray">Grayscale source image the plates are warped out of.</param>
/// <param name="canny">Canny edge image (passed through to recursive calls).</param>
/// <param name="licensePlateImagesList">Output: deskewed plate images.</param>
/// <param name="filteredLicensePlateImagesList">Output: filtered plate images.</param>
/// <param name="detectedLicensePlateRegionList">Output: rotated rectangles of detected plates.</param>
/// <param name="licenses">Output: OCR text per detected plate.</param>
private void FindLicensePlate(
    VectorOfVectorOfPoint contours, int[,] hierachy, int idx, IInputArray gray, IInputArray canny,
    List<IInputOutputArray> licensePlateImagesList, List<IInputOutputArray> filteredLicensePlateImagesList, List<RotatedRect> detectedLicensePlateRegionList,
    List<String> licenses)
{
    // Iterate the sibling chain: hierachy[idx, 0] is the next contour at this level.
    for (; idx >= 0; idx = hierachy[idx, 0])
    {
        int numberOfChildren = GetNumberOfChildren(hierachy, idx);
        //if it does not contains any children (charactor), it is not a license plate region
        if (numberOfChildren == 0)
        {
            continue;
        }
        using (VectorOfPoint contour = contours[idx])
        {
            if (CvInvoke.ContourArea(contour) > 400)
            {
                if (numberOfChildren < 3)
                {
                    //If the contour has less than 3 children, it is not a license plate (assuming license plate has at least 3 charactor)
                    //However we should search the children of this contour to see if any of them is a license plate
                    FindLicensePlate(contours, hierachy, hierachy[idx, 2], gray, canny, licensePlateImagesList, filteredLicensePlateImagesList, detectedLicensePlateRegionList, licenses);
                    continue;
                }
                RotatedRect box = CvInvoke.MinAreaRect(contour);
                // Normalize the angle into [-45, 45] by swapping width/height so the
                // affine warp below rotates by the smaller amount.
                if (box.Angle < -45.0)
                {
                    float tmp = box.Size.Width;
                    box.Size.Width = box.Size.Height;
                    box.Size.Height = tmp;
                    box.Angle += 90.0f;
                }
                else if (box.Angle > 45.0)
                {
                    float tmp = box.Size.Width;
                    box.Size.Width = box.Size.Height;
                    box.Size.Height = tmp;
                    box.Angle -= 90.0f;
                }
                double whRatio = (double)box.Size.Width / box.Size.Height;
                if (!(3.0 < whRatio && whRatio < 10.0))
                //if (!(1.0 < whRatio && whRatio < 2.0))
                {
                    //if the width height ratio is not in the specific range,it is not a license plate
                    //However we should search the children of this contour to see if any of them is a license plate
                    //Contour<Point> child = contours.VNext;
                    if (hierachy[idx, 2] > 0)
                    {
                        FindLicensePlate(contours, hierachy, hierachy[idx, 2], gray, canny, licensePlateImagesList, filteredLicensePlateImagesList, detectedLicensePlateRegionList, licenses);
                    }
                    continue;
                }
                using (UMat tmp1 = new UMat())
                using (UMat tmp2 = new UMat())
                {
                    // Map the box corners onto an upright rectangle of the same size.
                    PointF[] srcCorners = box.GetVertices();
                    PointF[] destCorners = new PointF[]
                    {
                        new PointF(0, box.Size.Height - 1),
                        new PointF(0, 0),
                        new PointF(box.Size.Width - 1, 0),
                        new PointF(box.Size.Width - 1, box.Size.Height - 1)
                    };
                    using (Mat rot = CvInvoke.GetAffineTransform(srcCorners, destCorners))
                    {
                        CvInvoke.WarpAffine(gray, tmp1, rot, Size.Round(box.Size));
                    }
                    //resize the license plate such that the front is ~ 10-12. This size of front results in better accuracy from tesseract
                    Size approxSize = new Size(240, 180);
                    double scale = Math.Min(approxSize.Width / box.Size.Width, approxSize.Height / box.Size.Height);
                    Size newSize = new Size((int)Math.Round(box.Size.Width * scale), (int)Math.Round(box.Size.Height * scale));
                    CvInvoke.Resize(tmp1, tmp2, newSize, 0, 0, Inter.Cubic);
                    //removes some pixels from the edge
                    int edgePixelSize = 3;
                    Rectangle newRoi = new Rectangle(new Point(edgePixelSize, edgePixelSize), tmp2.Size - new Size(2 * edgePixelSize, 2 * edgePixelSize));
                    UMat plate = new UMat(tmp2, newRoi);
                    UMat filteredPlate = FilterPlate(plate);
                    Tesseract.Character[] words;
                    StringBuilder strBuilder = new StringBuilder();
                    using (UMat tmp = filteredPlate.Clone())
                    {
                        // OCR the filtered plate and collect the raw UTF-8 text.
                        _ocr.SetImage(tmp);
                        _ocr.Recognize();
                        strBuilder.Append(_ocr.GetUTF8Text());
                        /*
                         * words = _ocr.GetCharacters();
                         * if (words.Length == 0) continue;
                         * for (int i = 0; i < words.Length; i++)
                         * {
                         *     strBuilder.Append(words[i].Text);
                         * }*/
                    }
                    licenses.Add(strBuilder.ToString());
                    licensePlateImagesList.Add(plate);
                    filteredLicensePlateImagesList.Add(filteredPlate);
                    detectedLicensePlateRegionList.Add(box);
                }
            }
        }
    }
}
/// <summary>
/// Frame handler (currently wired to a fixed test image rather than the capture):
/// smooths and binarizes the frame, runs Canny + contour detection, finds the largest
/// contour, draws its min-area rectangle, crops that region, and also builds a fixed
/// perspective warp of the crop for display.
/// NOTE(review): CL[0] is accessed before checking that any contours were found — with
/// an empty contour list this throws; guard contours.Size before the max search.
/// NOTE(review): the input is a hard-coded absolute path ("C:\Emgu\Dump\...") left over
/// from debugging — the commented _capture.Retrieve call is presumably the real source.
/// </summary>
private void ProcessFrame(object sender, EventArgs arg)
{
    Mat frame = new Mat();
    //_capture.Retrieve(frame, 0);
    frame = new Mat("C:\\Emgu\\Dump\\ea6b5b28a66c.jpg", LoadImageType.Unchanged);
    Mat grayFrame = new Mat();
    CvInvoke.CvtColor(frame, grayFrame, ColorConversion.Bgr2Gray);
    // Pyramid down/up acts as a cheap smoothing step before thresholding.
    Mat smallGrayFrame = new Mat();
    CvInvoke.PyrDown(grayFrame, smallGrayFrame);
    Mat smoothedGrayFrame = new Mat();
    CvInvoke.PyrUp(smallGrayFrame, smoothedGrayFrame);
    CvInvoke.Threshold(smoothedGrayFrame, smoothedGrayFrame, 100, 255, ThresholdType.Binary);
    //Image<Gray, Byte> smallGrayFrame = grayFrame.PyrDown();
    //Image<Gray, Byte> smoothedGrayFrame = smallGrayFrame.PyrUp();
    Mat cannyFrame = new Mat();
    CvInvoke.Canny(smoothedGrayFrame, cannyFrame, 100, 60);
    //Image<Gray, Byte> cannyFrame = smoothedGrayFrame.Canny(100, 60);
    VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint();
    CvInvoke.FindContours(cannyFrame, contours, null, RetrType.List, ChainApproxMethod.ChainApproxSimple);
    CvInvoke.DrawContours(frame, contours, 2, new Bgr(Color.Blue).MCvScalar);
    // Collect a min-area box per (approximated) contour, and keep the raw contours.
    List<RotatedRect> BL = new List<RotatedRect>();
    List<VectorOfPoint> CL = new List<VectorOfPoint>();
    for (int i = 0; i < contours.Size; i++)
    {
        using (VectorOfPoint contour = contours[i])
        using (VectorOfPoint approxContour = new VectorOfPoint())
        {
            CvInvoke.ApproxPolyDP(contour, approxContour, CvInvoke.ArcLength(contour, true) * 0.05, true);
            BL.Add(CvInvoke.MinAreaRect(approxContour));
            CL.Add(contour);
        }
    }
    // Linear scan for the contour with the largest area.
    VectorOfPoint maxContour = CL[0];
    double maxContourArea = CvInvoke.ContourArea(CL[0], false);
    for (int i = 0; i < CL.Count; i++)
    {
        if (CvInvoke.ContourArea(CL[i], false) > maxContourArea)
        {
            maxContourArea = CvInvoke.ContourArea(CL[i], false);
            maxContour = CL[i];
        }
    }
    // Outline the winning contour's min-area rectangle in pink.
    RotatedRect TMP = new RotatedRect();
    TMP = CvInvoke.MinAreaRect(maxContour);
    CvInvoke.Polylines(frame, Array.ConvertAll(TMP.GetVertices(), Point.Round), true, new Bgr(Color.Pink).MCvScalar, 2);
    // Crop the axis-aligned region centered on the rotated rectangle.
    Image<Bgr, Byte> srcImg = frame.ToImage<Bgr, Byte>();
    srcImg.ROI = new Rectangle((int)(TMP.Center.X - 0.5 * TMP.Size.Width), (int)(TMP.Center.Y - 0.5 * TMP.Size.Height), (int)TMP.Size.Width, (int)TMP.Size.Height);
    Image<Bgr, Byte> croppedImg = srcImg.Copy();
    cannyImageBox.Image = croppedImg;
    // Fixed source/target quads for a demonstration perspective warp of the crop.
    float[,] tmp =
    {
        {0, frame.Height},      //down
        {0, 0},                 //left
        {frame.Width, 0},       // up
        {frame.Width, frame.Height} //right
    };
    Matrix<float> sourceMat = new Matrix<float>(tmp);
    float[,] target =
    {
        {0, (float)0.85 * frame.Height},
        {0, 0},
        {(float)0.85 * frame.Width, 0},
        {(float)0.55 * frame.Width, (float)0.55 * frame.Height}
    };
    PointF[] tmpPF = new PointF[4];
    PointF[] targetPF = new PointF[4];
    for (int i = 0; i < 4; i++)
    {
        tmpPF[i].X = tmp[i, 0];
        tmpPF[i].Y = tmp[i, 1];
        targetPF[i].X = target[i, 0];
        targetPF[i].Y = target[i, 1];
    }
    Matrix<float> targetMat = new Matrix<float>(target);
    Mat TTT = CvInvoke.GetPerspectiveTransform(tmpPF, targetPF);
    Mat newcroppimg = new Mat();
    CvInvoke.WarpPerspective(croppedImg, newcroppimg, TTT, new System.Drawing.Size(241, 240));
    //CvInvoke.DrawContours(frame, TMP, 2, new Bgr(Color.Red).MCvScalar);
    /* foreach (RotatedRect box in BL)
     * {
     *     CvInvoke.Polylines(frame, Array.ConvertAll(box.GetVertices(), Point.Round), true, new Bgr(Color.DarkOrange).MCvScalar, 2);
     * }*/
    captureImageBox.Image = frame;
    grayscaleImageBox.Image = newcroppimg;
    smoothedGrayscaleImageBox.Image = smoothedGrayFrame;
    //cannyImageBox.Image = cannyFrame;
}
/// <summary>
/// Recursively walks the contour hierarchy looking for license-plate candidates,
/// rectifies each candidate region out of <paramref name="gray"/> and runs OCR on it.
/// One entry is appended to each of the four output lists per detected plate.
/// </summary>
/// <param name="contours">All contours found in the image.</param>
/// <param name="hierachy">Contour hierarchy; [i, 0] = next sibling, [i, 2] = first child.</param>
/// <param name="idx">Index of the first contour to examine at this level.</param>
/// <param name="gray">Grayscale source image the plate region is warped out of.</param>
/// <param name="canny">Edge image; only passed through to recursive calls, not read here.</param>
/// <param name="licensePlateImagesList">Receives the rectified plate images.</param>
/// <param name="filteredLicensePlateImagesList">Receives the plate images again (no filtering is applied in this variant).</param>
/// <param name="detectedLicensePlateRegionList">Receives the rotated rect of each detected plate.</param>
/// <param name="licenses">Receives the OCR text for each detected plate.</param>
private void FindLicensePlate(
    VectorOfVectorOfPoint contours, int[,] hierachy, int idx, IInputArray gray, IInputArray canny,
    List<IInputOutputArray> licensePlateImagesList, List<IInputOutputArray> filteredLicensePlateImagesList,
    List<RotatedRect> detectedLicensePlateRegionList, List<String> licenses)
{
    // Walk idx and all of its siblings ([idx, 0] is the next-sibling link).
    for (; idx >= 0; idx = hierachy[idx, 0])
    {
        int lettersCount = GetNumberOfChildren(hierachy, idx);
        // If it does not contain any children (characters), it is not a license plate region.
        if (lettersCount == 0)
        {
            continue;
        }
        using (VectorOfPoint contour = contours[idx])
        {
            if (CvInvoke.ContourArea(contour) > 400)
            {
                if (lettersCount < 2)
                {
                    // Fewer than 2 children: unlikely to be a plate itself, but one of
                    // its children might be - recurse into the first child ([idx, 2]).
                    FindLicensePlate(contours, hierachy, hierachy[idx, 2], gray, canny,
                        licensePlateImagesList, filteredLicensePlateImagesList,
                        detectedLicensePlateRegionList, licenses);
                    continue;
                }
                RotatedRect box = CvInvoke.MinAreaRect(contour);
                // Normalise the angle into [-45, 45] by swapping width/height so the
                // affine warp below always lays the long side out horizontally.
                if (box.Angle < -45.0)
                {
                    float tmp = box.Size.Width;
                    box.Size.Width = box.Size.Height;
                    box.Size.Height = tmp;
                    box.Angle += 90.0f;
                }
                else if (box.Angle > 45.0)
                {
                    float tmp = box.Size.Width;
                    box.Size.Width = box.Size.Height;
                    box.Size.Height = tmp;
                    box.Angle -= 90.0f;
                }
                using (UMat tmp1 = new UMat())
                using (UMat tmp2 = new UMat())
                {
                    // Map the rotated box corners onto an axis-aligned rectangle.
                    PointF[] srcCorners = box.GetVertices();
                    PointF[] destCorners = new PointF[] {
                        new PointF(0, box.Size.Height - 1),
                        new PointF(0, 0),
                        new PointF(box.Size.Width - 1, 0),
                        new PointF(box.Size.Width - 1, box.Size.Height - 1) };
                    using (Mat rot = CvInvoke.GetAffineTransform(srcCorners, destCorners))
                    {
                        CvInvoke.WarpAffine(gray, tmp1, rot, Size.Round(box.Size));
                    }
                    // Resize the license plate so the font is ~10-12 px; this size of
                    // font results in better accuracy from tesseract.
                    Size approxSize = new Size(240, 180);
                    double scale = Math.Min(approxSize.Width / box.Size.Width, approxSize.Height / box.Size.Height);
                    Size newSize = new Size((int)Math.Round(box.Size.Width * scale), (int)Math.Round(box.Size.Height * scale));
                    CvInvoke.Resize(tmp1, tmp2, newSize, 0, 0, Inter.Cubic);
                    // Removes some pixels from the edge.
                    int edgePixelSize = 3;
                    Rectangle newRoi = new Rectangle(new Point(edgePixelSize, edgePixelSize),
                        tmp2.Size - new Size(2 * edgePixelSize, 2 * edgePixelSize));
                    UMat plate = new UMat(tmp2, newRoi);
                    ocr.SetImage(plate.Clone());
                    ocr.Recognize();
                    licenses.Add(ocr.GetUTF8Text());
                    licensePlateImagesList.Add(plate);
                    // NOTE(review): no filtering is applied in this variant, so the same
                    // image goes into both the raw and the "filtered" list.
                    filteredLicensePlateImagesList.Add(plate);
                    detectedLicensePlateRegionList.Add(box);
                }
            }
        }
    }
}
/// <summary>
/// Finds the biggest contour (by point count) in the skin mask, draws its convex
/// hull and min-area-rect centre onto <paramref name="originalImage"/>, and stores
/// the contour's convexity-defect start/end/depth channels in the
/// startIndex/endIndex/depthIndex fields for later use.
/// </summary>
/// <param name="originalImage">Image drawn on (hull polyline and rect centre).</param>
/// <param name="skin">Binary skin mask the contours are extracted from.</param>
/// <remarks>
/// Fixes over the previous revision: the defect-channel guard checked
/// <c>channels.Length &gt;= 2</c> but then read index 2, which threw an
/// IndexOutOfRangeException whenever exactly two channels came back; the guard
/// now requires at least 3 channels. Also removed the unused <c>points</c>/<c>ps</c>
/// locals built from the box vertices.
/// </remarks>
private void ExtractContourAndHull(Image<Bgr, byte> originalImage, Image<Gray, byte> skin)
{
    var contours = new VectorOfVectorOfPoint();
    CvInvoke.FindContours(skin, contours, new Mat(), RetrType.List, ChainApproxMethod.ChainApproxSimple);

    // Select the contour with the most points (not the largest area).
    var result2 = 0;
    VectorOfPoint biggestContour = null;
    if (contours.Size != 0)
    {
        biggestContour = contours[0];
    }
    for (var i = 0; i < contours.Size; i++)
    {
        var result1 = contours[i].Size;
        if (result1 <= result2)
        {
            continue;
        }
        result2 = result1;
        biggestContour = contours[i];
    }
    if (biggestContour == null)
    {
        return;
    }

    currentContour = new VectorOfPoint();
    // Epsilon 0 keeps every point - no simplification, just a copy into currentContour.
    CvInvoke.ApproxPolyDP(biggestContour, currentContour, 0, true);
    //TODO Get to know why it gives exception
    //ImageFrame.Draw(biggestContour, 3, new Bgr(Color.LimeGreen));
    biggestContour = currentContour;

    var pointsToFs = new PointF[currentContour.Size];
    for (var i = 0; i < currentContour.Size; i++)
    {
        pointsToFs[i] = new PointF(currentContour[i].X, currentContour[i].Y);
    }
    var hull = CvInvoke.ConvexHull(pointsToFs, true);

    pointsToFs = new PointF[biggestContour.Size];
    for (var i = 0; i < biggestContour.Size; i++)
    {
        pointsToFs[i] = new PointF(biggestContour[i].X, biggestContour[i].Y);
    }
    box = CvInvoke.MinAreaRect(pointsToFs);

    var hullToPoints = new Point[hull.Length];
    for (var i = 0; i < hull.Length; i++)
    {
        hullToPoints[i] = Point.Round(hull[i]);
    }
    originalImage.DrawPolyline(hullToPoints, true, new Bgr(200, 125, 75), 2);
    originalImage.Draw(new CircleF(new PointF(box.Center.X, box.Center.Y), 3), new Bgr(200, 125, 75), 2);

    var convexHull = new VectorOfInt();
    CvInvoke.ConvexHull(currentContour, convexHull, false, false);
    defects = new Mat();
    CvInvoke.ConvexityDefects(currentContour, convexHull, defects);
    if (!defects.IsEmpty)
    {
        // Copy the defects Mat into a matrix so the channels can be split out.
        Matrix<int> m = new Matrix<int>(defects.Rows, defects.Cols, defects.NumberOfChannels);
        defects.CopyTo(m);
        Matrix<int>[] channels = m.Split();
        // BUG FIX: channel index 2 is read below, so at least 3 channels are
        // required (the previous ">= 2" check allowed an IndexOutOfRangeException).
        if (channels.Length >= 3)
        {
            startIndex = channels.ElementAt(0).Data;
            endIndex = channels.ElementAt(1).Data;
            depthIndex = channels.ElementAt(2).Data;
        }
    }
}
public void TestGetBox2DPoints() { RotatedRect box = new RotatedRect( new PointF(3.0f, 2.0f), new SizeF(4.0f, 6.0f), 0.0f); PointF[] vertices = box.GetVertices(); //TODO: Find out why the following test fails. (x, y) convention changed. //Assert.IsTrue(vertices[0].Equals(new PointF(0.0f, 0.0f))); //Assert.IsTrue(vertices[1].Equals(new PointF(6.0f, 0.0f))); }
/// <summary>
/// Recursively walks the contour hierarchy looking for license-plate candidates.
/// A candidate must have enough child contours (characters), a sufficient area and a
/// plate-like width/height ratio; each accepted region is rectified from
/// <paramref name="gray"/>, filtered via FilterPlate, OCR'd with <c>_ocr</c>, and
/// appended to the four output lists (one entry per plate).
/// </summary>
/// <param name="contours">All contours found in the image.</param>
/// <param name="hierachy">Contour hierarchy; [i, 0] = next sibling, [i, 2] = first child.</param>
/// <param name="idx">Index of the first contour to examine at this level.</param>
/// <param name="gray">Grayscale source image the plate region is warped out of.</param>
/// <param name="canny">Edge image; only passed through to recursive calls, not read here.</param>
private void FindLicensePlate(
    VectorOfVectorOfPoint contours, int[,] hierachy, int idx, IInputArray gray, IInputArray canny,
    List<IInputOutputArray> licensePlateImagesList, List<IInputOutputArray> filteredLicensePlateImagesList,
    List<RotatedRect> detectedLicensePlateRegionList, List<String> licenses)
{
    // Walk idx and all of its siblings ([idx, 0] is the next-sibling link).
    for (; idx >= 0; idx = hierachy[idx, 0])
    {
        int numberOfChildren = GetNumberOfChildren(hierachy, idx);
        // If it does not contain any children (characters), it is not a license plate region.
        if (numberOfChildren == 0)
        {
            continue;
        }
        using (VectorOfPoint contour = contours[idx])
        {
            if (CvInvoke.ContourArea(contour) > 400)
            {
                if (numberOfChildren < 3)
                {
                    // Fewer than 3 children: not a plate itself, but a child contour
                    // might be - recurse into the first child ([idx, 2]).
                    FindLicensePlate(contours, hierachy, hierachy[idx, 2], gray, canny,
                        licensePlateImagesList, filteredLicensePlateImagesList,
                        detectedLicensePlateRegionList, licenses);
                    continue;
                }
                RotatedRect box = CvInvoke.MinAreaRect(contour);
                // Normalise the angle into [-45, 45] by swapping width/height so the
                // affine warp below always lays the long side out horizontally.
                if (box.Angle < -45.0)
                {
                    float tmp = box.Size.Width;
                    box.Size.Width = box.Size.Height;
                    box.Size.Height = tmp;
                    box.Angle += 90.0f;
                }
                else if (box.Angle > 45.0)
                {
                    float tmp = box.Size.Width;
                    box.Size.Width = box.Size.Height;
                    box.Size.Height = tmp;
                    box.Angle -= 90.0f;
                }
                double whRatio = (double)box.Size.Width / box.Size.Height;
                if (!(3.0 < whRatio && whRatio < 10.0))
                //if (!(1.0 < whRatio && whRatio < 2.0))
                {
                    // Width/height ratio is not plate-like; still search the children
                    // in case one of them is a plate.
                    if (hierachy[idx, 2] > 0)
                    {
                        FindLicensePlate(contours, hierachy, hierachy[idx, 2], gray, canny,
                            licensePlateImagesList, filteredLicensePlateImagesList,
                            detectedLicensePlateRegionList, licenses);
                    }
                    continue;
                }
                using (UMat tmp1 = new UMat())
                using (UMat tmp2 = new UMat())
                {
                    // Map the rotated box corners onto an axis-aligned rectangle.
                    PointF[] srcCorners = box.GetVertices();
                    PointF[] destCorners = new PointF[] {
                        new PointF(0, box.Size.Height - 1),
                        new PointF(0, 0),
                        new PointF(box.Size.Width - 1, 0),
                        new PointF(box.Size.Width - 1, box.Size.Height - 1) };
                    using (Mat rot = CvInvoke.GetAffineTransform(srcCorners, destCorners))
                    {
                        CvInvoke.WarpAffine(gray, tmp1, rot, Size.Round(box.Size));
                    }
                    // Resize the plate so the font size suits tesseract better.
                    Size approxSize = new Size(240, 180);
                    double scale = Math.Min(approxSize.Width / box.Size.Width, approxSize.Height / box.Size.Height);
                    Size newSize = new Size((int)Math.Round(box.Size.Width * scale), (int)Math.Round(box.Size.Height * scale));
                    CvInvoke.Resize(tmp1, tmp2, newSize, 0, 0, Inter.Cubic);
                    // Removes some pixels from the edge.
                    int edgePixelSize = 3;
                    Rectangle newRoi = new Rectangle(new Point(edgePixelSize, edgePixelSize),
                        tmp2.Size - new Size(2 * edgePixelSize, 2 * edgePixelSize));
                    UMat plate = new UMat(tmp2, newRoi);
                    UMat filteredPlate = FilterPlate(plate);
                    StringBuilder strBuilder = new StringBuilder();
                    using (UMat tmp = filteredPlate.Clone())
                    {
                        _ocr.SetImage(tmp);
                        _ocr.Recognize();
                        strBuilder.Append(_ocr.GetUTF8Text());
                    }
                    licenses.Add(strBuilder.ToString());
                    licensePlateImagesList.Add(plate);
                    filteredLicensePlateImagesList.Add(filteredPlate);
                    detectedLicensePlateRegionList.Add(box);
                }
            }
        }
    }
}
/// <summary>
/// Recursively walks the contour hierarchy looking for license-plate candidates,
/// rectifies and filters each accepted region, then runs it through one of three
/// OCR backends. Intermediate images are dumped to disk via SaveImageClass for
/// debugging. One entry per detected plate is appended to each output list.
/// </summary>
/// <param name="contours">All contours found in the image.</param>
/// <param name="hierachy">Contour hierarchy; [i, 0] = next sibling, [i, 2] = first child.</param>
/// <param name="idx">Index of the first contour to examine at this level.</param>
/// <param name="gray">Grayscale source image the plate region is warped out of.</param>
/// <param name="canny">Edge image; only passed through to recursive calls, not read here.</param>
/// <param name="ocr_mode">OCR backend: 1 = Tesseract, 2 = Google API, 3 = Computer Vision; any other value skips OCR.</param>
/// <param name="threshold_parameter">Threshold forwarded to FilterPlate.</param>
private void FindLicensePlate(
    VectorOfVectorOfPoint contours, int[,] hierachy, int idx, IInputArray gray, IInputArray canny,
    List<IInputOutputArray> licensePlateImagesList, List<IInputOutputArray> filteredLicensePlateImagesList,
    List<RotatedRect> detectedLicensePlateRegionList, List<String> licenses, int ocr_mode, int threshold_parameter)
{
    // Walk idx and all of its siblings ([idx, 0] is the next-sibling link).
    for (; idx >= 0; idx = hierachy[idx, 0])
    {
        int numberOfChildren = GetNumberOfChildren(hierachy, idx);
        // If it does not contain any children (characters), it is not a license plate region.
        if (numberOfChildren == 0)
        {
            continue;
        }
        using (VectorOfPoint contour = contours[idx])
        {
            if (CvInvoke.ContourArea(contour) > 400)
            {
                if (numberOfChildren < 6)
                {
                    // If the contour has fewer than 6 children, it is not a license plate;
                    // however we should search the children of this contour to see if
                    // any of them is a license plate.
                    FindLicensePlate(contours, hierachy, hierachy[idx, 2], gray, canny,
                        licensePlateImagesList, filteredLicensePlateImagesList,
                        detectedLicensePlateRegionList, licenses, ocr_mode, threshold_parameter);
                    continue;
                }
                RotatedRect box = CvInvoke.MinAreaRect(contour);
                // Normalise the angle into [-45, 45] by swapping width/height so the
                // affine warp below always lays the long side out horizontally.
                if (box.Angle < -45.0)
                {
                    float tmp = box.Size.Width;
                    box.Size.Width = box.Size.Height;
                    box.Size.Height = tmp;
                    box.Angle += 90.0f;
                }
                else if (box.Angle > 45.0)
                {
                    float tmp = box.Size.Width;
                    box.Size.Width = box.Size.Height;
                    box.Size.Height = tmp;
                    box.Angle -= 90.0f;
                }
                double whRatio = (double)box.Size.Width / box.Size.Height;
                if (!(3.0 < whRatio && whRatio < 10.0))
                //if (!(1.0 < whRatio && whRatio < 2.0))
                {
                    // If the width/height ratio is not in the specific range, it is not a
                    // license plate; however we should search the children of this contour
                    // to see if any of them is a license plate.
                    //Contour<Point> child = contours.VNext;
                    if (hierachy[idx, 2] > 0)
                    {
                        FindLicensePlate(contours, hierachy, hierachy[idx, 2], gray, canny,
                            licensePlateImagesList, filteredLicensePlateImagesList,
                            detectedLicensePlateRegionList, licenses, ocr_mode, threshold_parameter);
                    }
                    continue;
                }
                using (UMat tmp1 = new UMat())
                using (UMat tmp2 = new UMat())
                {
                    // Map the rotated box corners onto an axis-aligned rectangle.
                    PointF[] srcCorners = box.GetVertices();
                    PointF[] destCorners = new PointF[] {
                        new PointF(0, box.Size.Height - 1),
                        new PointF(0, 0),
                        new PointF(box.Size.Width - 1, 0),
                        new PointF(box.Size.Width - 1, box.Size.Height - 1) };
                    using (Mat rot = CvInvoke.GetAffineTransform(srcCorners, destCorners))
                    {
                        //box.Center.X -= 20;
                        //box.Size.Height += 20;
                        //box.Size.Width += 20;
                        //box.Center.Y -= 20;
                        CvInvoke.WarpAffine(gray, tmp1, rot, Size.Round(box.Size));
                    }
                    // Debug dumps of the intermediate stages.
                    SaveImageClass.SaveImage(gray, "gray2.jpg");
                    SaveImageClass.SaveImage(tmp1, "tmp1.jpg");
                    // Resize the license plate so the font is ~10-12 px; this size of
                    // font results in better accuracy from tesseract.
                    Size approxSize = new Size(240, 180);
                    double scale = Math.Min(approxSize.Width / box.Size.Width, approxSize.Height / box.Size.Height);
                    Size newSize = new Size((int)Math.Round(box.Size.Width * scale), (int)Math.Round(box.Size.Height * scale));
                    CvInvoke.Resize(tmp1, tmp2, newSize, 0, 0, Inter.Cubic);
                    SaveImageClass.SaveImage(tmp1, "tmp1after.jpg");
                    SaveImageClass.SaveImage(tmp2, "tmp2.jpg");
                    // Removes some pixels from the edge.
                    int edgePixelSize = 3;
                    Rectangle newRoi = new Rectangle(new Point(edgePixelSize, edgePixelSize),
                        tmp2.Size - new Size(2 * edgePixelSize, 2 * edgePixelSize));
                    UMat plate = new UMat(tmp2, newRoi);
                    SaveImageClass.SaveImage(plate, "plate.jpg");
                    UMat filteredPlate = new UMat();
                    filteredPlate = FilterPlate(plate, threshold_parameter);
                    SaveImageClass.SaveImage(filteredPlate, "filtered.jpg");
                    StringBuilder strBuilder = new StringBuilder();
                    // Dispatch to the selected OCR backend; any other mode leaves
                    // strBuilder as the empty builder created above.
                    switch (ocr_mode)
                    {
                        case 1:
                        {
                            strBuilder = TesseractOCR.GetText(filteredPlate, _ocr);
                            break;
                        }
                        case 2:
                        {
                            strBuilder = GoogleApiOCR.GetText(filteredPlate);
                            break;
                        }
                        case 3:
                        {
                            strBuilder = ComputerVisionOCR.GetText(filteredPlate);
                            break;
                        }
                        default:
                            break;
                    }
                    // NOTE(review): a backend presumably returns null on failure,
                    // in which case this detection is dropped entirely - confirm.
                    if (strBuilder != null)
                    {
                        licenses.Add(strBuilder.ToString());
                        licensePlateImagesList.Add(plate);
                        filteredLicensePlateImagesList.Add(filteredPlate);
                        detectedLicensePlateRegionList.Add(box);
                    }
                }
            }
        }
    }
}
/// <summary>
/// Traverses the given line segments from one short side of the bounding
/// rectangle toward the other, chaining nearest segments via GetValue, drawing
/// each accepted segment in red on a copy of <paramref name="img"/> and showing
/// the result in DisplayImage3.
/// </summary>
/// <param name="lines">Candidate line segments to chain into a spine.</param>
/// <param name="rotatedRectangle">Bounding rect; its two short sides seed the start/end of the traversal. May be empty.</param>
/// <param name="img">Grayscale image used for fallback boundaries and as the drawing background.</param>
private void FindSpine(LineSegment2D[] lines, RotatedRect rotatedRectangle, Image<Gray, Byte> img)
{
    LineSegment2DF[] initialLines = new LineSegment2DF[2];
    if (!rotatedRectangle.Size.IsEmpty)
    {
        // Use the two shorter boundaries from rotatedRect for initial detection.
        PointF[] vertices = rotatedRectangle.GetVertices();
        PointF p1 = vertices[0];
        PointF p2 = vertices[1];
        PointF p3 = vertices[2];
        PointF p4 = vertices[3];
        if (p2.DistanceSquared(p1) < p2.DistanceSquared(p3))
        {
            // p1 and p2 are paired, p3 and p4 are paired.
            initialLines[0] = new LineSegment2DF(p1, p2);
            initialLines[1] = new LineSegment2DF(p3, p4);
        }
        else
        {
            // p2 and p3 are paired, p1 and p4 are paired.
            initialLines[0] = new LineSegment2DF(p2, p3);
            initialLines[1] = new LineSegment2DF(p1, p4);
        }
    }
    else
    {
        // No rect available: use the left and right image sides for initial detection.
        initialLines[0] = new LineSegment2DF(new PointF(0, 0), new PointF(0, img.Height - 1));
        initialLines[1] = new LineSegment2DF(new PointF(img.Width - 1, 0), new PointF(img.Width - 1, img.Height - 1));
    }

    // Find the line segment closest (by nearer endpoint) to the initial line.
    double minDistance = double.MaxValue;
    LineSegment2D? targetLine = null;
    foreach (var line in lines)
    {
        double minDistance1 = MathExtension.MinDistanceFromLineToPoint(initialLines[0].P1, initialLines[0].P2, line.P1);
        double minDistance2 = MathExtension.MinDistanceFromLineToPoint(initialLines[0].P1, initialLines[0].P2, line.P2);
        double currentDist = minDistance1 < minDistance2 ? minDistance1 : minDistance2;
        if (currentDist < minDistance)
        {
            minDistance = currentDist;
            targetLine = line;
        }
    }

    List<LineSegment2D> previousLines = new List<LineSegment2D>();
    // We have our target line; try to traverse to the other side (initialLines[1]).
    LineSegment2D? nextLine = null;
    Image<Bgr, Byte> moddedImage = img.Convert<Bgr, Byte>();
    if (targetLine.HasValue)
    {
        previousLines.Add(targetLine.Value);
        // We have a starting position - draw it for inspection.
        moddedImage.Draw(targetLine.Value, new Bgr(Color.Red), 2);
    }
    // GetValue is expected to set nextLine via ref (null when no further segment
    // is found); previousLines prevents revisiting segments.
    // NOTE(review): if lines is empty, targetLine is null on the first GetValue
    // call - presumably GetValue tolerates that; confirm.
    do
    {
        GetValue(lines, initialLines[1], previousLines.ToArray(), targetLine, ref nextLine);
        if (nextLine.HasValue)
        {
            targetLine = nextLine;
            previousLines.Add(nextLine.Value);
            moddedImage.Draw(nextLine.Value, new Bgr(Color.Red), 2);
        }
    }
    while (nextLine.HasValue);
    DisplayImage3 = ImageService.ToBitmapSource(moddedImage);
}
public BoxesBoundary(IBoxBoundary model, RotatedRect rect) : base(model) { Points = rect.GetVertices().Select(x => new Point((int)x.X, (int)x.Y)).ToArray(); Color = new Bgr(System.Drawing.Color.Aqua); Name = "Box - " + Id; }
/// <summary>
/// Re-processes the video frame selected by SliderValue: finds the largest blob
/// (the mouse), detects head and tail points, generates and orients the spine,
/// finds the tail/body split, and renders several diagnostic overlays into
/// Image1/Image3/Image4/Image5/Image6 and SkelImage.
/// </summary>
/// <remarks>
/// Fixes over the previous revision:
/// - early return when no blobs are detected (the old code dereferenced a null
///   <c>mouseBlob</c>);
/// - removed dead commented-out code, the unused <c>centerInt</c> local, and the
///   behavior-free empty <c>if/else</c> on the rotated-rect area.
/// NOTE(review): <c>tempBinary</c>, <c>binaryRoi</c>, <c>temp2</c> and <c>blobs</c>
/// are never disposed here - presumably short-lived enough not to matter; confirm.
/// </remarks>
private void UpdateFrameNumber()
{
    Video.SetFrame(SliderValue);
    using (Image<Bgr, Byte> orig = Video.GetFrameImage())
    using (Image<Gray, Byte> origGray = orig.Convert<Gray, Byte>())
    using (Image<Gray, Byte> binary = origGray.ThresholdBinary(new Gray(ThresholdValue), new Gray(255)))
    using (Image<Gray, Byte> subbed = BinaryBackground.AbsDiff(binary))
    {
        // Largest blob in the background-subtracted image is taken as the mouse.
        CvBlobs blobs = new CvBlobs();
        BlobDetector.Detect(subbed, blobs);
        CvBlob mouseBlob = null;
        double maxArea = -1;
        foreach (var blob in blobs.Values)
        {
            if (blob.Area > maxArea)
            {
                mouseBlob = blob;
                maxArea = blob.Area;
            }
        }
        // BUG FIX: bail out when the frame contains no blobs at all.
        if (mouseBlob == null)
        {
            return;
        }

        double gapDistance = 50;
        RBSK.Settings.GapDistance = gapDistance;
        PointF center = mouseBlob.Centroid;
        Point[] mouseContour = mouseBlob.GetContour();
        orig.DrawPolyline(mouseContour, true, new Bgr(Color.Cyan));
        Image1 = ImageService.ToBitmapSource(orig);

        // Head points: precomputed if available, otherwise detect via RBSK rules.
        PointF[] result;
        if (HeadPoints != null)
        {
            result = HeadPoints[SliderValue].HeadPoints;
        }
        else
        {
            double prob = 0;
            RBSK headRbsk = MouseService.GetStandardMouseRules();
            headRbsk.Settings.GapDistance = 65;
            headRbsk.Settings.BinaryThreshold = 20;
            List<List<PointF>> allKeyPoints = headRbsk.FindKeyPoints(mouseContour, headRbsk.Settings.NumberOfSlides, false);
            result = headRbsk.FindPointsFromRules(allKeyPoints[0], binary, ref prob);
        }
        if (result != null)
        {
            using (Image<Bgr, Byte> test = orig.Clone())
            {
                foreach (var point in result)
                {
                    test.Draw(new CircleF(point, 3), new Bgr(Color.Red), 3);
                }
                Image1 = ImageService.ToBitmapSource(test);
            }
        }
        else
        {
            return;
        }

        RotatedRect rotatedRect = CvInvoke.MinAreaRect(mouseContour.Select(x => new PointF(x.X, x.Y)).ToArray());
        ISkeleton skel = ModelResolver.Resolve<ISkeleton>();
        Image<Gray, Byte> tempBinary = binary.Clone();
        System.Drawing.Rectangle rect = mouseBlob.BoundingBox;
        Image<Gray, Byte> binaryRoi = tempBinary.GetSubRect(rect);
        using (Image<Bgr, Byte> displayImage = subbed.Convert<Bgr, Byte>())
        using (Image<Gray, Byte> skelImage = skel.GetSkeleton(binaryRoi))
        using (Image<Bgr, Byte> drawImage = orig.Clone())
        using (Image<Bgr, Byte> tempImage2 = new Image<Bgr, byte>(drawImage.Size))
        {
            // Keep a persistent copy of the skeleton image.
            if (SkelImage != null)
            {
                SkelImage.Dispose();
            }
            SkelImage = skelImage.Clone();

            tempImage2.SetValue(new Bgr(Color.Black));
            ISpineFinding spineFinder = ModelResolver.Resolve<ISpineFinding>();
            spineFinder.NumberOfCycles = 3;
            spineFinder.NumberOfIterations = 1;
            spineFinder.SkeletonImage = skelImage;
            Image5 = ImageService.ToBitmapSource(skelImage);

            // Tail tip: the contour point with the sharpest angle (in (9, 30) degrees)
            // between its neighbours delta points away on either side.
            const int delta = 20;
            double smallestAngle = double.MaxValue;
            Point tailPoint = Point.Empty;
            for (int i = 0; i < mouseContour.Length; i++)
            {
                int leftDelta = i - delta;
                int rightDelta = i + delta;
                if (leftDelta < 0)
                {
                    leftDelta += mouseContour.Length;
                }
                if (rightDelta >= mouseContour.Length)
                {
                    rightDelta -= mouseContour.Length;
                }
                Point testPoint = mouseContour[i];
                Point leftPoint = mouseContour[leftDelta];
                Point rightPoint = mouseContour[rightDelta];
                Vector v1 = new Vector(leftPoint.X - testPoint.X, leftPoint.Y - testPoint.Y);
                Vector v2 = new Vector(rightPoint.X - testPoint.X, rightPoint.Y - testPoint.Y);
                double angle = Math.Abs(Vector.AngleBetween(v1, v2));
                if (angle < 30 && angle > 9)
                {
                    if (angle < smallestAngle)
                    {
                        smallestAngle = angle;
                        tailPoint = testPoint;
                    }
                }
            }

            // Spine runs between head tip (result[2]) and tail point, both shifted
            // into the blob-ROI coordinate frame.
            PointF headCornerCorrect = new PointF(result[2].X - rect.X, result[2].Y - rect.Y);
            PointF tailCornerCorrect = new PointF(tailPoint.X - rect.X, tailPoint.Y - rect.Y);
            PointF[] spine = spineFinder.GenerateSpine(headCornerCorrect, tailCornerCorrect);
            Point topCorner = mouseBlob.BoundingBox.Location;
            PointF[] spineCornerCorrected = new PointF[spine.Length];
            for (int i = 0; i < spine.Length; i++)
            {
                // Shift the spine back into full-image coordinates.
                spineCornerCorrected[i] = new PointF(spine[i].X + topCorner.X, spine[i].Y + topCorner.Y);
            }

            ITailFinding tailFinding = ModelResolver.Resolve<ITailFinding>();
            double rotatedWidth = rotatedRect.Size.Width < rotatedRect.Size.Height ? rotatedRect.Size.Width : rotatedRect.Size.Height;
            List<Point> bodyPoints;
            if (result != null)
            {
                // Orient the spine so it starts at the tail (furthest from the head tip).
                double firstDist = result[2].DistanceSquared(spineCornerCorrected.First());
                double lastDist = result[2].DistanceSquared(spineCornerCorrected.Last());
                if (firstDist < lastDist)
                {
                    spineCornerCorrected = spineCornerCorrected.Reverse().ToArray();
                }
            }
            double waistLength;
            double pelvicArea1, pelvicArea2;
            tailFinding.FindTail(mouseContour, spineCornerCorrected, displayImage, rotatedWidth, mouseBlob.Centroid,
                out bodyPoints, out waistLength, out pelvicArea1, out pelvicArea2);
            Console.WriteLine(smallestAngle);
            if (!tailPoint.IsEmpty)
            {
                drawImage.Draw(new CircleF(tailPoint, 4), new Bgr(Color.Red), 3);
            }

            if (bodyPoints != null && bodyPoints.Count > 0)
            {
                Point[] bPoints = bodyPoints.ToArray();
                double volume = MathExtension.PolygonArea(bPoints);
                Emgu.CV.Structure.Ellipse fittedEllipse = PointCollection.EllipseLeastSquareFitting(bPoints.Select(x => x.ToPointF()).ToArray());
                Console.WriteLine("Volume: " + volume + " - " + (fittedEllipse.RotatedRect.Size.Width * fittedEllipse.RotatedRect.Size.Height) + ", Waist Length: " + waistLength);
                // NOTE(review): the "|| true" disables this area filter - kept as-is
                // because removing it would change behavior; confirm intent.
                if (MathExtension.PolygonArea(bPoints) > (rotatedRect.Size.Height * rotatedRect.Size.Width) / 6 || true)
                {
                    tempImage2.DrawPolyline(bPoints, true, new Bgr(Color.White));
                    PointF centroid = MathExtension.FindCentroid(bPoints);
                    System.Drawing.Rectangle minRect;
                    // Flood-fill mask must be 2 px larger than the filled image.
                    Image<Gray, Byte> temp2 = new Image<Gray, byte>(tempImage2.Width + 2, tempImage2.Height + 2);
                    CvInvoke.FloodFill(tempImage2, temp2, centroid.ToPoint(), new MCvScalar(255, 255, 255), out minRect,
                        new MCvScalar(5, 5, 5), new MCvScalar(5, 5, 5));
                    using (Image<Gray, Byte> nonZeroImage = tempImage2.Convert<Gray, Byte>())
                    {
                        int[] volume2 = nonZeroImage.CountNonzero();
                        Console.WriteLine("Volume2: " + volume2[0]);
                    }
                    tempImage2.Draw(new CircleF(centroid, 2), new Bgr(Color.Blue), 2);

                    // Signed distance from the body centroid to the nearest spine segment.
                    double distanceToSpine = double.MaxValue;
                    PointF p11 = PointF.Empty, p22 = PointF.Empty;
                    for (int i = 1; i < spineCornerCorrected.Length; i++)
                    {
                        PointF point1 = spineCornerCorrected[i - 1];
                        PointF point2 = spineCornerCorrected[i];
                        double cDist = MathExtension.MinDistanceFromLineToPoint(point1, point2, centroid);
                        if (cDist < distanceToSpine)
                        {
                            p11 = point1;
                            p22 = point2;
                            distanceToSpine = cDist;
                        }
                    }
                    PointSideVector psv = MathExtension.FindSide(p11, p22, centroid);
                    if (psv == PointSideVector.Below)
                    {
                        distanceToSpine *= -1;
                    }
                    Console.WriteLine(distanceToSpine + ",");
                }
            }

            // Draw the spine (shifted back to full-image coordinates) on both overlays.
            for (int i = 1; i < spine.Length; i++)
            {
                PointF point1 = spine[i - 1];
                PointF point2 = spine[i];
                point1.X += topCorner.X;
                point1.Y += topCorner.Y;
                point2.X += topCorner.X;
                point2.Y += topCorner.Y;
                LineSegment2D line = new LineSegment2D(new Point((int)point1.X, (int)point1.Y), new Point((int)point2.X, (int)point2.Y));
                drawImage.Draw(line, new Bgr(Color.Aqua), 2);
                tempImage2.Draw(line, new Bgr(Color.Cyan), 2);
            }
            drawImage.Draw(new CircleF(mouseBlob.Centroid, 2), new Bgr(Color.Blue), 2);
            Image3 = ImageService.ToBitmapSource(drawImage);
            Image6 = ImageService.ToBitmapSource(tempImage2);

            // Split the rotated rect into head-half and rear-half quads.
            double height = rotatedRect.Size.Height;
            double width = rotatedRect.Size.Width;
            bool heightLong = height > width;
            double halfLength;
            PointF[] vertices = rotatedRect.GetVertices();
            if (heightLong)
            {
                halfLength = height;
            }
            else
            {
                halfLength = width;
            }
            halfLength /= 2;
            PointF[] sidePoints1 = new PointF[4], midPoints = new PointF[2];
            PointF p1 = vertices[0], p2 = vertices[1], p3 = vertices[2], p4 = vertices[3];
            double d1 = p1.DistanceSquared(p2);
            double d2 = p2.DistanceSquared(p3);
            if (d1 < d2)
            {
                // p1-p2 and p3-p4 are the short sides.
                sidePoints1[0] = p1;
                sidePoints1[1] = p2;
                sidePoints1[2] = p4;
                sidePoints1[3] = p3;
                midPoints[0] = p1.MidPoint(p4);
                midPoints[1] = p2.MidPoint(p3);
            }
            else
            {
                // p2-p3 and p1-p4 are the short sides.
                sidePoints1[0] = p1;
                sidePoints1[1] = p4;
                sidePoints1[2] = p2;
                sidePoints1[3] = p3;
                midPoints[0] = p1.MidPoint(p2);
                midPoints[1] = p3.MidPoint(p4);
            }
            PointF intersection1 = PointF.Empty;
            PointF intersection2 = PointF.Empty;
            using (Image<Gray, Byte> halfTest1 = origGray.CopyBlank())
            using (Image<Gray, Byte> halfTest2 = origGray.CopyBlank())
            {
                Point[] rect1 = new Point[] { new Point((int)sidePoints1[0].X, (int)sidePoints1[0].Y), new Point((int)midPoints[0].X, (int)midPoints[0].Y), new Point((int)midPoints[1].X, (int)midPoints[1].Y), new Point((int)sidePoints1[1].X, (int)sidePoints1[1].Y) };
                Point[] rect2 = new Point[] { new Point((int)sidePoints1[2].X, (int)sidePoints1[2].Y), new Point((int)midPoints[0].X, (int)midPoints[0].Y), new Point((int)midPoints[1].X, (int)midPoints[1].Y), new Point((int)sidePoints1[3].X, (int)sidePoints1[3].Y) };
                if (MathExtension.PolygonContainsPoint(rect1, center))
                {
                    // Rect 1 is head, look for line in r2.
                }
                else if (MathExtension.PolygonContainsPoint(rect2, center))
                {
                    // Rect 2 is head, look for line in r1.
                }
                else
                {
                    // Something has gone wrong.
                }
                halfTest1.FillConvexPoly(rect1, new Gray(255));
                halfTest2.FillConvexPoly(rect2, new Gray(255));
                // Whichever half holds more foreground pixels is taken as the head half.
                int count1, count2;
                using (Image<Gray, Byte> holder1 = subbed.Copy(halfTest1))
                using (Image<Gray, Byte> holder2 = subbed.Copy(halfTest2))
                {
                    count1 = holder1.CountNonzero()[0];
                    count2 = holder2.CountNonzero()[0];
                }
                PointF qr1 = PointF.Empty, qr2 = PointF.Empty, qr3 = PointF.Empty, qr4 = PointF.Empty;
                if (count1 > count2)
                {
                    // Holder 1 is head, holder 2 is rear.
                    qr1 = sidePoints1[2];
                    qr2 = sidePoints1[2].MidPoint(midPoints[0]);
                    qr3 = sidePoints1[3].MidPoint(midPoints[1]);
                    qr4 = sidePoints1[3];
                }
                else if (count1 < count2)
                {
                    // Holder 2 is head, holder 1 is rear.
                    qr1 = sidePoints1[0];
                    qr2 = sidePoints1[0].MidPoint(midPoints[0]);
                    qr3 = sidePoints1[1].MidPoint(midPoints[1]);
                    qr4 = sidePoints1[1];
                }
                // The "fat line" runs between qr2 and qr3.
                PointF centerPoint = qr2.MidPoint(qr3);
                PointF i1 = qr2;
                PointF i2 = qr3;
                intersection1 = MathExtension.PolygonLineIntersectionPoint(centerPoint, i1, mouseContour);
                intersection2 = MathExtension.PolygonLineIntersectionPoint(centerPoint, i2, mouseContour);
            }

            // Point a quarter of the way back from the centroid along the rect's long axis.
            double deltaX = halfLength * Math.Cos(rotatedRect.Angle * MathExtension.Deg2Rad);
            double deltaY = halfLength * Math.Sin(rotatedRect.Angle * MathExtension.Deg2Rad);
            const double scaleFactor = 0.25;
            PointF newPoint = new PointF((float)(center.X - (deltaX * scaleFactor)), (float)(center.Y - (deltaY * scaleFactor)));
            PointF intersectionPoint1 = PointF.Empty;
            PointF intersectionPoint2 = PointF.Empty;
            Point[] temp = null;
            PointF[] headPoints = RBSKService.RBSKParallel(binary, MouseService.GetStandardMouseRules(), ref temp);
            if (headPoints != null)
            {
                // Cast a line perpendicular to the body axis through newPoint and
                // intersect it with the contour on both sides.
                PointF tip = headPoints[2];
                Vector forwardVec = new Vector(tip.X - newPoint.X, tip.Y - newPoint.Y);
                Vector rotatedVec = new Vector(-forwardVec.Y, forwardVec.X);
                PointF i1 = new PointF((float)(newPoint.X + (rotatedVec.X * 1)), (float)(newPoint.Y + (rotatedVec.Y * 1)));
                PointF i2 = new PointF((float)(newPoint.X - (rotatedVec.X * 1)), (float)(newPoint.Y - (rotatedVec.Y * 1)));
                intersectionPoint1 = MathExtension.PolygonLineIntersectionPoint(newPoint, i1, mouseContour);
                intersectionPoint2 = MathExtension.PolygonLineIntersectionPoint(newPoint, i2, mouseContour);
            }

            displayImage.Draw(new CircleF(mouseBlob.Centroid, 3), new Bgr(Color.Blue), 2);
            displayImage.Draw(rotatedRect, new Bgr(Color.Yellow), 3);
            Image4 = ImageService.ToBitmapSource(displayImage);
        }
    }
}