/// <summary>
/// Renders the gray source image with a white bounding rectangle drawn around
/// every contour whose axis-aligned bounding box covers at least 200,000 px².
/// </summary>
/// <param name="threshold1">First threshold forwarded to <c>GetContours</c>.</param>
/// <param name="threshold2">Second threshold forwarded to <c>GetContours</c>.</param>
/// <returns>The gray image with qualifying boxes drawn on it.</returns>
public Mat GetBoxImages(double threshold1, double threshold2)
{
    Mat template = GetGrayImage();
    MCvScalar white = new MCvScalar(255, 255, 255);
    using (VectorOfVectorOfPoint contours = GetContours(threshold1, threshold2))
    {
        for (int index = 0; index < contours.Size; index++)
        {
            RotatedRect rotated = CvInvoke.MinAreaRect(contours[index]);
            Rectangle bounds = rotated.MinAreaRect();
            int area = bounds.Size.Height * bounds.Size.Width;
            // Skip anything smaller than the minimum interesting area.
            if (area < 200000)
            {
                continue;
            }
            if (bounds.Size.Width > 0)
            {
                Debug.WriteLine("{0}:{1} / {2}/{3} : {4} : {5}",
                    bounds.Top, bounds.Left, bounds.Bottom, bounds.Right, area,
                    (double)bounds.Size.Height / (double)bounds.Size.Width);
            }
            CvInvoke.Rectangle(template, bounds, white);
        }
    }
    return(template);
}
/// <summary>
/// Fits an ellipse to the blob contour stored in the bitmap's tag and writes
/// the ellipse's angle and bounding-box axis lengths back onto that tag.
/// </summary>
/// <param name="arg">Bitmap whose <c>Tag</c> is a <c>beans.blob_tag</c> carrying a contour.</param>
/// <param name="cfg">Configuration (unused by this step).</param>
/// <returns>The same bitmap, with its tag updated in place.</returns>
public static Bitmap Run(Bitmap arg, ConfigSetting cfg)
{
    beans.blob_tag tag = (beans.blob_tag)arg.Tag;
    RotatedRect ellipse = CvInvoke.FitEllipse(tag.contour);
    Rectangle bounds = ellipse.MinAreaRect();
    tag.orientation = ellipse.Angle;
    tag.maxAxis = new distance(bounds.Width);
    tag.minAxis = new distance(bounds.Height);
    return(arg);
}
/// <summary>
/// Menu handler: captures a frame from the camera, previews it, and — on user
/// confirmation — saves it as the new origin reference image plus an annotated
/// copy with the main contour's bounding rectangle drawn on it.
/// </summary>
private void getOriginImageToolStripMenuItem_Click(object sender, EventArgs e)
{
    p_imShow.Image = null;
    UpdateParameter();
    bool response = OpenCamera();
    if (response == true)
    {
        using (Image <Bgr, byte> iBgr = GetImage())
        {
            // Show the captured frame before asking the user to confirm.
            p_imShow.Image = iBgr.ToBitmap();
            DialogResult kq = MessageBox.Show("You Sure create new origin image?", "Warning", MessageBoxButtons.YesNo, MessageBoxIcon.Warning);
            if (kq == DialogResult.Yes)
            {
                // Persist the raw origin frame.
                CvInvoke.Imwrite(@"img\origin.bmp", iBgr);
                // Threshold inside the configured ROI and find the dominant contour.
                Image <Gray, byte> iGray = ComputerVison.RoiImage(iBgr.Convert <Gray, byte>(), Config.Parameter.ROI);
                using (VectorOfPoint cnt = ComputerVison.FindContours(iGray, Config.Parameter.THRESHOLD_VALUE))
                {
                    RotatedRect a = CvInvoke.MinAreaRect(cnt);
                    // Cache the axis-aligned bounding box of the origin contour (field).
                    ORGRec = a.MinAreaRect();
                    using (Image <Bgr, byte> iBgr2 = iGray.Convert <Bgr, byte>())
                    {
                        // Save a visual verification image with the box drawn in green.
                        CvInvoke.Rectangle(iBgr2, ORGRec, new MCvScalar(0, 255, 0), 3);
                        CvInvoke.Imwrite("img\\originRoi.bmp", iBgr2);
                    }
                }
                iGray.Dispose();
            }
        }
        Camera.CloseDevice();
    }
}
/// <summary>
/// Draws every contour (optionally filtered by a caller-supplied predicate)
/// onto a rotated copy of the image, boxing each accepted contour, then raises
/// the image event with the annotated result. Does nothing when no handler is
/// attached.
/// </summary>
internal void FireImageEvent(object sender, Guid cardId, Guid imageId, ImageType imageType,
                             double angle, float X, float Y, VectorOfVectorOfPoint contours,
                             CannyParam cannyParameter = null,
                             Func <Size, Rectangle, bool> contourFilter = null)
{
    if (ImageEvent == null)
    {
        return;
    }
    using (Mat rotatedImage = GetRotatedImage(angle))
    {
        for (int index = 0; index < contours.Size; index++)
        {
            Rectangle box = CvInvoke.MinAreaRect(contours[index]).MinAreaRect();
            // No filter means draw everything; otherwise the predicate decides.
            bool accepted = contourFilter == null || contourFilter(rotatedImage.Size, box);
            if (accepted)
            {
                CvInvoke.DrawContours(rotatedImage, contours, index, yellow);
                CvInvoke.Rectangle(rotatedImage, box, green, thickness: 2);
            }
        }
        Image.FireImageEvent(null, cardId, imageId, imageType, rotatedImage, angle, X, Y, cannyParameter: cannyParameter);
    }
}
/// <summary>
/// Loads the previously saved origin image, locates the main contour inside the
/// configured ROI, and caches its padded bounding box (ORGRec) and the two tip
/// points (ORG). Also writes an annotated verification image to disk.
/// Any failure is reported via a message box rather than thrown.
/// </summary>
private void GetOriginImage()
{
    try
    {
        using (Mat img = CvInvoke.Imread(@"img\origin.bmp", Emgu.CV.CvEnum.ImreadModes.Grayscale))
        {
            Image <Gray, byte> _img = img.ToImage <Gray, byte>();
            _img = ComputerVison.RoiImage(_img, Config.Parameter.ROI);
            VectorOfPoint cnt = new VectorOfPoint();
            cnt = ComputerVison.FindContours(_img, Config.Parameter.THRESHOLD_VALUE);
            RotatedRect a = CvInvoke.MinAreaRect(cnt);
            ORGRec = a.MinAreaRect();
            // Shift the box from ROI coordinates into full-image coordinates and
            // pad it by 10 px on every side.
            ORGRec.X += Config.Parameter.ROI.X - 10;
            ORGRec.Y += Config.Parameter.ROI.Y - 10;
            ORGRec.Height += 20;
            ORGRec.Width += 20;
            using (Image <Bgr, byte> iBgr2 = _img.Convert <Bgr, byte>())
            {
                // NOTE(review): ORGRec was just shifted into full-image coordinates,
                // but iBgr2 is the ROI-cropped image — the drawn box may be offset.
                // Verify against the caller's expectations.
                CvInvoke.Rectangle(iBgr2, ORGRec, new MCvScalar(0, 255, 0), 3);
                CvInvoke.Imwrite("img\\originRoi.bmp", iBgr2);
            }
            // Cache the two tip points of the origin contour (ROI coordinates).
            Point[] p = ComputerVison.Search2Tip(cnt);
            ORG.SetPointA(p[0]);
            ORG.SetPointB(p[1]);
            cnt.Dispose();
            _img.Dispose();
        }
    }
    catch (Exception er)
    {
        MessageBox.Show(er.Message, "Error", MessageBoxButtons.OK, MessageBoxIcon.Warning);
    }
}
/// <summary>
/// Returns the axis-aligned bounding rectangle of the contour when every pair
/// of consecutive edges meets at roughly a right angle (85°–95°); otherwise
/// returns an empty rectangle.
/// </summary>
/// <param name="approxContour">Polygon approximation of the contour.</param>
/// <param name="edgesList">Edges of that polygon, in order.</param>
private Rectangle GetRectangleFromContour(VectorOfPoint approxContour, LineSegment2D[] edgesList)
{
    // Need at least three vertices to form a closed shape.
    if (approxContour.Size <= 2)
    {
        return(default(Rectangle));
    }
    for (int k = 0; k < edgesList.Length; k++)
    {
        LineSegment2D next = edgesList[(k + 1) % edgesList.Length];
        double angle = Math.Abs(next.GetExteriorAngleDegree(edgesList[k]));
        // Any corner far from 90° disqualifies the shape.
        if (angle < 85 || angle > 95)
        {
            return(default(Rectangle));
        }
    }
    return(CvInvoke.MinAreaRect(approxContour).MinAreaRect());
}
/// <summary>
/// Diffs two equally sized frames, thresholds the diff at mean + stddev, and
/// traces/annotates every external contour whose bounding box is at least
/// 50x50 px and inside the frame. Intermediate images are saved to disk.
/// NOTE(review): <c>ret</c> is never populated, so this method always returns
/// an empty array — compare with detect_blue_rectangle_1, which adds each
/// accepted rectangle. Confirm whether that is intentional.
/// </summary>
static Rectangle[] detect_blue_rectangle(Image <Bgr, Byte> img1, Image <Bgr, Byte> img2)
{
    List <Rectangle> ret = new List <Rectangle>();
    if (img1.Size == img2.Size)
    {
        Image <Bgr, Byte> diff = img2.AbsDiff(img1);
        UMat uimage = new UMat();
        CvInvoke.CvtColor(diff, uimage, ColorConversion.Bgr2Gray);
        // Pyramid down/up pass smooths noise while keeping strong edges.
        UMat pyrDown = new UMat();
        CvInvoke.PyrDown(uimage, pyrDown);
        CvInvoke.PyrUp(pyrDown, uimage);
        // Threshold at mean + one standard deviation of the gray diff.
        MCvScalar m1 = new MCvScalar();
        MCvScalar m2 = new MCvScalar();
        CvInvoke.MeanStdDev(uimage, ref m1, ref m2);
        Image <Gray, Byte> t = uimage.ToImage <Gray, Byte>().ThresholdBinary(new Gray(m1.V0 + m2.V0), new Gray(255));
        uimage = t.ToUMat();
        uimage.Save("temp_1.jpg");
        using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
        {
            Mat hierarchy = new Mat();
            CvInvoke.FindContours(uimage, contours, hierarchy, RetrType.External, ChainApproxMethod.ChainApproxNone);
            int count = contours.Size;
            for (int i = 0; i < count; i++)
            {
                RotatedRect rr = CvInvoke.MinAreaRect(contours[i]);
                Rectangle r = rr.MinAreaRect();
                System.Diagnostics.Trace.WriteLine(string.Format("rect={0}", r));
                // Keep only sizeable boxes fully inside the frame.
                if (r.Width > 50 && r.Height > 50 && r.X >= 0 && r.Y >= 0)
                {
                    System.Diagnostics.Trace.WriteLine(string.Format("[{1}]: rect={0}", r, i));
                    CvInvoke.Rectangle(diff, rr.MinAreaRect(), new MCvScalar(255, 255, 0, 0));
                    // Polygon approximation is computed but its result is discarded.
                    using (VectorOfPoint contour = contours[i])
                        using (VectorOfPoint approxContour = new VectorOfPoint())
                        {
                            CvInvoke.ApproxPolyDP(contour, approxContour, CvInvoke.ArcLength(contour, true) * 0.05, false);
                        }
                }
            }
        }
        diff.Save("temp_3.jpg");
    }
    return(ret.ToArray());
}
/// <summary>
/// Estimates the skew angle of a text image (modified in place) by dilating
/// text into horizontal blocks and taking the min-area-rectangle angle of the
/// widest block. Returns -400 as a sentinel when no contour is found.
/// </summary>
/// <param name="image">Grayscale page image; mutated by the preprocessing steps.</param>
/// <returns>Skew angle normalized into [-45, 45] degrees, or -400 on failure.</returns>
static double getDeskewAngleByLongestBlock(Image <Gray, byte> image)
{
    // https://becominghuman.ai/how-to-automatically-deskew-straighten-a-text-image-using-opencv-a0c30aed83df
    CvInvoke.BitwiseNot(image, image);                      // to negative
    CvInvoke.GaussianBlur(image, image, new Size(9, 9), 0); // remove small spots
    CvInvoke.Threshold(image, image, 125, 255, ThresholdType.Otsu | ThresholdType.Binary);
    // A wide, flat structuring element merges characters on a line into one block.
    Mat se = CvInvoke.GetStructuringElement(ElementShape.Rectangle, new Size(30, 1), new Point(-1, -1));
    CvInvoke.Dilate(image, image, se, new Point(-1, -1), 1, BorderType.Constant, CvInvoke.MorphologyDefaultBorderValue);
    VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint();
    Mat hierarchy = new Mat();
    CvInvoke.FindContours(image, contours, hierarchy, RetrType.External, ChainApproxMethod.ChainApproxSimple);
    if (contours.Size < 1)
    {
        return(-400); // sentinel: nothing to measure
    }
    // Track the widest block; its rotation is taken as the page skew.
    // (Unused `bestContour` local from the original removed.)
    int maxW = 0;
    double angle = 0;
    for (int i = 0; i < contours.Size; i++)
    {
        RotatedRect rr = CvInvoke.MinAreaRect(contours[i]);
        Rectangle r = rr.MinAreaRect();
        int w = r.Width > r.Height ? r.Width : r.Height;
        if (maxW < w)
        {
            maxW = w;
            angle = rr.Angle;
        }
    }
    // Normalize the reported angle into [-45, 45].
    if (angle > 45)
    {
        angle -= 90;
    }
    else if (angle < -45)
    {
        angle += 90;
    }
    return(angle);
}
/// <summary>
/// Deskews a text image: finds the dominant text-block angle via dilation and
/// min-area rectangles, then rotates the original image about its center to
/// level it. Returns null when no contour can be found.
/// Fixes resource leaks from the original: all intermediate Mats/images are
/// now disposed.
/// </summary>
/// <param name="image">Source RGB page image (not modified).</param>
/// <returns>A newly allocated, rotated copy of <paramref name="image"/>, or null.</returns>
static Image <Rgb, byte> deskew(Image <Rgb, byte> image) //good
{
    // https://becominghuman.ai/how-to-automatically-deskew-straighten-a-text-image-using-opencv-a0c30aed83df
    using (Image <Gray, byte> image2 = image.Convert <Gray, byte>())
    {
        CvInvoke.BitwiseNot(image2, image2);                      // to negative
        CvInvoke.GaussianBlur(image2, image2, new Size(9, 9), 0); // remove small spots
        CvInvoke.Threshold(image2, image2, 125, 255, ThresholdType.Otsu | ThresholdType.Binary);
        // Wide structuring element merges characters on a line into one block.
        using (Mat se = CvInvoke.GetStructuringElement(ElementShape.Rectangle, new Size(30, 5), new Point(-1, -1)))
        {
            CvInvoke.Dilate(image2, image2, se, new Point(-1, -1), 1, BorderType.Constant, CvInvoke.MorphologyDefaultBorderValue);
        }
        using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
        using (Mat hierarchy = new Mat())
        {
            CvInvoke.FindContours(image2, contours, hierarchy, RetrType.External, ChainApproxMethod.ChainApproxSimple);
            if (contours.Size < 1)
            {
                return(null);
            }
            // The widest block's rotation is taken as the page skew.
            int maxW = 0;
            double angle = 0;
            for (int i = 0; i < contours.Size; i++)
            {
                RotatedRect rr = CvInvoke.MinAreaRect(contours[i]);
                Rectangle r = rr.MinAreaRect();
                int w = r.Width > r.Height ? r.Width : r.Height;
                if (maxW < w)
                {
                    maxW = w;
                    angle = rr.Angle;
                }
            }
            // Normalize the reported angle into [-45, 45].
            if (angle > 45)
            {
                angle -= 90;
            }
            else if (angle < -45)
            {
                angle += 90;
            }
            using (RotationMatrix2D rotationMat = new RotationMatrix2D())
            {
                CvInvoke.GetRotationMatrix2D(new PointF((float)image.Width / 2, (float)image.Height / 2), angle, 1, rotationMat);
                Image <Rgb, byte> image3 = new Image <Rgb, byte>(image.Size);
                CvInvoke.WarpAffine(image, image3, rotationMat, image.Size);
                return(image3);
            }
        }
    }
}
/// <summary>
/// Estimates the skew angle of a text image (modified in place) by averaging
/// the normalized min-area-rectangle angles of the widest dilated text blocks.
/// Returns -400 as a sentinel when no contour is found.
/// Fixes a crash in the original: it unconditionally indexed angles[1] and
/// angles[2], throwing ArgumentOutOfRangeException when fewer than three
/// contours were found. The average now uses up to the three widest blocks.
/// </summary>
/// <param name="image">Grayscale page image; mutated by the preprocessing steps.</param>
/// <returns>Average skew angle in [-45, 45] degrees, or -400 on failure.</returns>
static double getDeskewAngleByLongestBlock4(Image <Gray, byte> image)
{
    // https://becominghuman.ai/how-to-automatically-deskew-straighten-a-text-image-using-opencv-a0c30aed83df
    CvInvoke.BitwiseNot(image, image);                      // to negative
    CvInvoke.GaussianBlur(image, image, new Size(9, 9), 0); // remove small spots
    CvInvoke.Threshold(image, image, 125, 255, ThresholdType.Otsu | ThresholdType.Binary);
    // Wide structuring element merges characters on a line into one block.
    Mat se = CvInvoke.GetStructuringElement(ElementShape.Rectangle, new Size(30, 1), new Point(-1, -1));
    CvInvoke.Dilate(image, image, se, new Point(-1, -1), 1, BorderType.Constant, CvInvoke.MorphologyDefaultBorderValue);
    VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint();
    Mat hierarchy = new Mat();
    CvInvoke.FindContours(image, contours, hierarchy, RetrType.External, ChainApproxMethod.ChainApproxSimple);
    if (contours.Size < 1)
    {
        return(-400); // sentinel: nothing to measure
    }
    // Collect each block's normalized angle together with its longest side.
    List <Angle> angles = new List <Angle>();
    for (int i = 0; i < contours.Size; i++)
    {
        RotatedRect rr = CvInvoke.MinAreaRect(contours[i]);
        double a = rr.Angle;
        Rectangle r = rr.MinAreaRect();
        int w = r.Width > r.Height ? r.Width : r.Height;
        // Normalize each angle into [-45, 45].
        if (a > 45)
        {
            a -= 90;
        }
        else if (a < -45)
        {
            a += 90;
        }
        angles.Add(new Angle { angle = a, width = w });
    }
    // Average the angles of the (up to) three widest blocks.
    double angle = angles.OrderByDescending(a => a.width)
                         .Take(3)
                         .Average(a => a.angle);
    return(angle);
}
/// <summary>
/// Returns (a clone of) the original title image with contour outlines and
/// bounding boxes drawn for every contour whose rotated-rect area exceeds
/// <paramref name="minArea"/>. The annotated image is cached and only rebuilt
/// when the Canny parameters change.
/// NOTE(review): the cache key is only <c>cannyParameter</c> — a call with a
/// different <paramref name="angle"/> but identical Canny parameters returns
/// the cached image for the old angle. Confirm whether angle changes should
/// invalidate the cache.
/// </summary>
/// <param name="angle">Rotation applied when (re)building the image.</param>
/// <param name="cannyParameter">Edge-detection parameters; must not be null.</param>
/// <param name="minArea">Minimum rotated-rectangle area for a contour to be drawn.</param>
/// <returns>A clone of the cached annotated image (caller owns it).</returns>
/// <exception cref="ArgumentNullException">When cannyParameter is null.</exception>
public Mat GetContouredImage(double angle, CannyParam cannyParameter, float minArea = 5000.0F)
{
    if (cannyParameter == null)
    {
        throw new ArgumentNullException("cannyParameter");
    }
    // Rebuild only when there is no cached image or the Canny settings changed.
    if (_contouredImage.image == null || !_contouredImage.cannyParam.Equals(cannyParameter))
    {
        if (_contouredImage.image != null)
        {
            _contouredImage.image.Dispose();
            _contouredImage.image = null;
        }
        _contouredImage.image = GetGreyImage(angle);
        using (Mat titleCannyImage = GetCannyImage(() => { return(GetGreyImage(angle)); }, angle, cannyParameter))
        {
            using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
            {
                CvInvoke.FindContours(titleCannyImage, contours, hierarchy: null, mode: RetrType.List, method: ChainApproxMethod.ChainApproxNone);
                // NOTE(review): loop starts at i = 1, so contour 0 is never drawn —
                // confirm this deliberately skips the outermost contour.
                for (int i = 1; i < contours.Size; i++)
                {
                    RotatedRect rotatedRect = CvInvoke.MinAreaRect(contours[i]);
                    float area = rotatedRect.Size.Width * rotatedRect.Size.Height;
                    if (area > minArea)
                    {
                        CvInvoke.DrawContours(_contouredImage.image, contours, i, yellow, thickness: 2);
                        Rectangle box = rotatedRect.MinAreaRect();
                        CvInvoke.Rectangle(_contouredImage.image, box, green, thickness: 2);
                    }
                }
                _contouredImage.cannyParam = cannyParameter;
            }
        }
    }
    return(_contouredImage.image.Clone());
}
/// <summary>
/// Check if a contour has the features of a plate: its tilt (measured against
/// the long side) must be at most 15 degrees, it must have nonzero extent, and
/// its area/aspect must satisfy <c>ratio_check</c> for the given plate type.
/// </summary>
/// <param name="type">0: rectangle plate, 1: square plate</param>
/// <param name="plate_feature">contains X, Y, Width, Height and angle of the plate</param>
/// <returns>true when the region looks like a plate of the requested type.</returns>
public static bool validate_rotation_and_ratio(type_of_plate type, RotatedRect plate_feature)
{
    Rectangle bounds = plate_feature.MinAreaRect();
    double width = bounds.Width;
    double height = bounds.Height;
    // Normalize the tilt so it is measured relative to the long side.
    double tilt = width > height ? -plate_feature.Angle : 90 + plate_feature.Angle;
    if (tilt > 15)
    {
        return(false);
    }
    // Degenerate boxes cannot be plates.
    if (width == 0 || height == 0)
    {
        return(false);
    }
    return(ratio_check(type, width * height, width, height));
}
/// <summary>
/// Finds large rectangles in the difference between two equally sized frames:
/// smooths the grayscale diff with a pyramid pass, runs Canny, and keeps every
/// external contour whose bounding box exceeds 100x100 px and lies fully inside
/// the frame. When more than two candidates remain they are narrowed through
/// the learned blue-block filter.
/// </summary>
/// <param name="img1">Reference frame.</param>
/// <param name="img2">Current frame (must be the same size as img1).</param>
/// <returns>Accepted rectangles; empty when the sizes differ.</returns>
public static Rectangle[] detect_blue_rectangle_1(Image <Bgr, Byte> img1, Image <Bgr, Byte> img2)
{
    List <Rectangle> found = new List <Rectangle>();
    if (img1.Size == img2.Size)
    {
        Image <Bgr, Byte> diff = img2.AbsDiff(img1);
        UMat uimage = new UMat();
        CvInvoke.CvtColor(diff, uimage, ColorConversion.Bgr2Gray);
        // Pyramid down/up pass smooths noise while keeping strong edges.
        UMat pyrDown = new UMat();
        CvInvoke.PyrDown(uimage, pyrDown);
        CvInvoke.PyrUp(pyrDown, uimage);
        UMat cannyEdges = new UMat();
        CvInvoke.Canny(uimage, cannyEdges, 20.0, 120.0);
        using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
        {
            CvInvoke.FindContours(cannyEdges, contours, null, RetrType.External, ChainApproxMethod.ChainApproxSimple);
            for (int i = 0; i < contours.Size; i++)
            {
                Rectangle candidate = CvInvoke.MinAreaRect(contours[i]).MinAreaRect();
                // Keep only sizeable boxes fully inside the frame.
                if (candidate.X > 0 && candidate.Y > 0 && candidate.Width > 100 && candidate.Height > 100)
                {
                    found.Add(candidate);
                }
            }
        }
    }
    if (found.Count > 2)
    {
        found = new List <Rectangle>(MachineLearning.blue_block_filter(found.ToArray()));
    }
    return(found.ToArray());
}
/// <summary>
/// Detects up to <paramref name="cardsToDetect"/> cards in the frame: thresholds
/// the image, sorts contours by descending area, perspective-warps each card
/// candidate to 800x800, matches it against the card library by minimum absolute
/// pixel difference, and records the recognized card.
/// </summary>
private void detectCards(Mat image, int cardsToDetect)
{
    Mat gray = new Mat(), blur = new Mat(), thresh = new Mat();
    CvInvoke.CvtColor(image, gray, ColorConversion.Bgr2Gray);
    CvInvoke.GaussianBlur(gray, blur, new System.Drawing.Size(1, 1), 1000);
    CvInvoke.Threshold(blur, thresh, 200, 255, ThresholdType.Binary);
    Mat hierarchy = new Mat();
    VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint();
    CvInvoke.FindContours(thresh, contours, hierarchy, RetrType.Tree, ChainApproxMethod.ChainApproxNone);
    // In-place insertion sort of the contour vectors by descending area.
    // NOTE(review): this mutates contours[j] via Clear/Push through the indexer —
    // verify that VectorOfVectorOfPoint's indexer returns a live view and not a
    // copy, otherwise this sort has no effect.
    for (int i = 0; i < contours.Size; ++i)
    {
        int j = i;
        while (j > 0 && CvInvoke.ContourArea(contours[j - 1], false) < CvInvoke.ContourArea(contours[j], false))
        {
            VectorOfPoint tmp = new VectorOfPoint();
            tmp.Push(contours[j]);
            contours[j].Clear();
            contours[j].Push(contours[j - 1]);
            contours[j - 1].Clear();
            contours[j - 1].Push(tmp);
            j--;
        }
    }
    List <RotatedRect> foundCards = new List <RotatedRect>();
    Image <Bgr, byte> tst = new Image <Bgr, byte>(image.Bitmap);
    int numCards = 0;
    for (int i = 0; i < contours.Size && numCards < cardsToDetect; ++i)
    {
        VectorOfPoint card = contours[i];
        double area = CvInvoke.ContourArea(card, false);
        double peri = CvInvoke.ArcLength(card, true);
        VectorOfPoint approx = new VectorOfPoint();
        CvInvoke.ApproxPolyDP(card, approx, 0.02 * peri, true);
        RotatedRect rect = CvInvoke.MinAreaRect(card);
        System.Drawing.PointF[] r = CvInvoke.BoxPoints(rect);
        // Skip this contour when its box contains the center of any card we
        // already accepted (it is the same physical card).
        bool stop = false;
        for (int j = 0; j < foundCards.Count; ++j)
        {
            RotatedRect crd = foundCards[j];
            System.Drawing.PointF center = crd.Center;
            if (rect.MinAreaRect().Left < center.X && rect.MinAreaRect().Right > center.X && rect.MinAreaRect().Top < center.Y && rect.MinAreaRect().Bottom > center.Y)
            {
                stop = true;
            }
        }
        if (stop)
        {
            continue;
        }
        numCards++;
        // Destination quad: fixed 800x800 card image; source quad: the four
        // approximated corners ordered top-left, top-right via Y-sort + X swaps.
        System.Drawing.PointF[] points = new System.Drawing.PointF[4], points2 = new System.Drawing.PointF[4];
        points[0] = new System.Drawing.PointF(0, 0);
        points[1] = new System.Drawing.PointF(799, 0);
        points[2] = new System.Drawing.PointF(799, 799);
        points[3] = new System.Drawing.PointF(0, 799);
        for (int j = 0; j < approx.Size && j < 4; ++j)
        {
            points2[j] = approx[j];
        }
        Array.Sort(points2, (a, b) => (int)(a.Y - b.Y));
        if (points2[0].X < points2[1].X)
        {
            System.Drawing.PointF tmp = points2[0];
            points2[0] = points2[1];
            points2[1] = tmp;
        }
        if (points2[2].X > points2[3].X)
        {
            System.Drawing.PointF tmp = points2[2];
            points2[2] = points2[3];
            points2[3] = tmp;
        }
        Mat transform = CvInvoke.GetPerspectiveTransform(points2, points);
        Mat warp = new Mat();
        CvInvoke.WarpPerspective(blur, warp, transform, new System.Drawing.Size(800, 800));
        // Template-match: the library card with the smallest summed absolute
        // pixel difference against the warped candidate wins.
        Card recognizedCard = null;
        foreach (var c in library)
        {
            if (recognizedCard == null)
            {
                recognizedCard = library[0];
            }
            Mat diff1 = new Mat(), diff2 = new Mat();
            CvInvoke.AbsDiff(recognizedCard.GetImage(), warp, diff1);
            CvInvoke.AbsDiff(c.GetImage(), warp, diff2);
            var data1 = diff1.GetData();
            int sum1 = 0;
            foreach (var d in data1)
            {
                sum1 += d;
            }
            var data2 = diff2.GetData();
            int sum2 = 0;
            foreach (var d in data2)
            {
                sum2 += d;
            }
            if (sum2 <= sum1)
            {
                recognizedCard = c;
            }
        }
        Console.WriteLine(recognizedCard.GetColor() + ", " + recognizedCard.GetValue());
        // NOTE(review): this only appends when the table is empty or the new card
        // EQUALS the last one — confirm whether the comparison should be != to
        // avoid recording duplicates instead.
        if (cardsOnTable.Count == 0 || (cardsOnTable[cardsOnTable.Count - 1].GetColor() == recognizedCard.GetColor() && cardsOnTable[cardsOnTable.Count - 1].GetValue() == recognizedCard.GetValue()))
        {
            cardsOnTable.Add(recognizedCard);
        }
        foundCards.Add(rect);
    }
}
/// <summary>
/// check whether the plate is tilted or not, if yes, rotate it.
/// Extracts the patch covered by the rotated rectangle and rotates it about its
/// center so the plate sits level. Returns the input unchanged when the plate
/// is already level. Dead locals from the original (unused r/X/Y/W/H and the
/// unused GetRectSubPix "output" computation) have been removed; the returned
/// image is the warped patch, as before.
/// </summary>
/// <param name="plate">Grayscale source image containing the plate.</param>
/// <param name="plate_feature">Rotated rectangle describing the plate region.</param>
/// <returns>rotated image (or the original when angle == 0)</returns>
private static Image <Gray, byte> crop_and_rotated_plate(Image <Gray, byte> plate, RotatedRect plate_feature)
{
    // Axis-aligned extent of the rotated rectangle's four corners.
    PointF[] boxes = CvInvoke.BoxPoints(plate_feature);
    List <double> Xs = new List <double>();
    List <double> Ys = new List <double>();
    foreach (PointF box in boxes)
    {
        Xs.Add(box.X);
        Ys.Add(box.Y);
    }
    double Xmax = Xs.Max();
    double Ymax = Ys.Max();
    double Xmin = Xs.Min();
    double Ymin = Ys.Min();
    double angle = plate_feature.Angle;
    float X_center = (float)(Xmax + Xmin) / 2;
    float Y_center = (float)(Ymax + Ymin) / 2;
    Size patch_size = new Size((int)(Xmax - Xmin), (int)(Ymax - Ymin));
    // Angles below -45 describe the box with its sides swapped; normalize.
    if (angle < (-45))
    {
        angle = angle + 90;
    }
    if (angle == 0)
    {
        return(plate); // already level, nothing to do
    }
    PointF center = new PointF(X_center, Y_center);
    using (Mat map_matrix = new Mat(new Size(2, 3), DepthType.Cv64F, 1))
    using (Image <Gray, byte> cropped = new Image <Gray, byte>(patch_size))
    {
        CvInvoke.GetRotationMatrix2D(center, angle, 1.0, map_matrix);
        // Extract the patch around the plate, then rotate it level.
        CvInvoke.GetRectSubPix(plate, patch_size, center, cropped);
        Image <Gray, byte> warp_affine = cropped.WarpAffine(map_matrix, Inter.Linear, Warp.Default, BorderType.Default, new Gray(0));
        return(warp_affine);
    }
}
/// <summary>
/// One full pick-and-place cycle: captures a frame under the light, finds the
/// label contour inside the ROI, computes its offset/rotation relative to the
/// origin points, converts that to PLC pulses, sends them to the PLC, and shows
/// an annotated verification image. On failure asks the user whether to retry
/// (recursively) or give up with a zeroed result.
/// </summary>
private void Handling()
{
    Response result = new Response();
    // Timestamp used for the backup image file names.
    string name = DateTime.Now.Year.ToString() + "_" + DateTime.Now.Month.ToString() + "_" + DateTime.Now.Day.ToString() + "_" + DateTime.Now.Hour.ToString() + "_" + DateTime.Now.Minute.ToString() + "_" + DateTime.Now.Second.ToString();
    // Flash the light only while capturing.
    Light_Mode(modeCamera.Checked);
    Thread.Sleep(500);
    Image <Bgr, byte> iBgr = GetImage();
    Light_Mode(false);
    Image <Gray, byte> iGray = iBgr.Convert <Gray, byte>();
    iGray = ComputerVison.RoiImage(iGray, Config.Parameter.ROI);
    VectorOfPoint cnt = new VectorOfPoint();
    Point[] p = new Point[2];
    cnt = ComputerVison.FindContours(iGray, Config.Parameter.THRESHOLD_VALUE);
    if (cnt != null)
    {
        // First pass computes the raw angle; the tips are then rotated about the
        // origin and a second pass computes the final offsets.
        p = ComputerVison.Search2Tip(cnt);
        ComputerVison.Calculator(ref result, ORG.PointA, ORG.PointB, p[0], p[1], Config.Parameter.LabelSize.Width, false);
        ComputerVison.RouPoint(ORG.PointO, ref p[0], result.ANGLE);
        ComputerVison.RouPoint(ORG.PointO, ref p[1], result.ANGLE);
        ComputerVison.Calculator(ref result, ORG.PointA, ORG.PointB, p[0], p[1], Config.Parameter.LabelSize.Width, true);
        // Convert millimetres/degrees into PLC pulse counts (axis offsets 150/20).
        short y = (short)Math.Round(result.X * Config.Parameter.PULSE_Y + 150);
        short x = (short)Math.Round(result.Y * Config.Parameter.PULSE_X + 20);
        short z = (short)Math.Round(-result.ANGLE * Config.Parameter.PULSE_Z / 360);
        if (modeCamera.Checked)
        {
            PLCCommunicate(x, y, z);
        }
        // Rebuild the gray image, rotate it by the measured angle, and draw the
        // detected label box for visual verification.
        iGray = iBgr.Convert <Gray, byte>();
        ComputerVison.RotationImage(ref iGray, Config.Parameter.ROTATION_CENTER, (float)result.ANGLE);
        cnt = ComputerVison.FindContours(iGray, Config.Parameter.THRESHOLD_VALUE);
        using (Image <Bgr, byte> iBgr2 = iGray.Convert <Bgr, byte>())
        {
            if (cnt != null)
            {
                RotatedRect r = CvInvoke.MinAreaRect(cnt);
                CvInvoke.Rectangle(iBgr2, r.MinAreaRect(), new MCvScalar(0, 255, 0), 3);
            }
            if (modeCamera.Checked)
            {
                // Keep raw and annotated copies for later inspection.
                CvInvoke.Imwrite(@"backup\" + name + ".bmp", iBgr);
                CvInvoke.Imwrite(@"backup_H\" + name + "trans.bmp", iBgr2);
            }
            p_imShow.Invoke(new MethodInvoker(delegate()
            {
                p_imShow.Image = iBgr2.ToBitmap();
            }));
        }
        cnt.Dispose();
    }
    else
    {
        // No label found: show the origin box instead and offer a retry.
        using (Image <Bgr, byte> iBgr2 = iGray.Convert <Bgr, byte>())
        {
            CvInvoke.Rectangle(iBgr2, ORGRec, new MCvScalar(0, 255, 0), 3);
            p_imShow.Invoke(new MethodInvoker(delegate()
            {
                p_imShow.Image = iBgr2.ToBitmap();
            }));
        }
        DialogResult kq = MessageBox.Show("Not found label from images! You want to try again!", "Warning", MessageBoxButtons.YesNo, MessageBoxIcon.Warning);
        if (kq == DialogResult.Yes)
        {
            // Release this attempt's images before retrying recursively.
            iGray.Dispose();
            iBgr.Dispose();
            Handling();
        }
        else
        {
            result.ANGLE = 0;
            result.X = 0;
            result.Y = 0;
        }
    }
    iGray.Dispose();
    iBgr.Dispose();
}
/// <summary>
/// Locates the marker nearest to the frame center: gray-converts and median-
/// blurs the frame, runs Canny, approximates each contour to a polygon, keeps
/// convex polygons with area &gt; 100, and returns the center of the candidate
/// closest to the frame center along with its bounding rectangle.
/// </summary>
/// <param name="frame">Input frame (BGR or single-channel).</param>
/// <param name="boundingRect">Bounding box of the chosen marker; Empty when none.</param>
/// <returns>Marker center, or Point.Empty when the frame is empty / no marker found.</returns>
private Point FindMarker(Mat frame, out Rectangle boundingRect)
{
    boundingRect = Rectangle.Empty;
    if (frame.IsEmpty)
    {
        return(Point.Empty);
    }
    Point location = Point.Empty;
    Point frameCenter = new Point(frame.Width / 2, frame.Height / 2);
    double distanceFromFrameCenter = 0.0;
    // convert to gray scale
    Mat gray = new Mat();
    if (frame.NumberOfChannels == 3)
    {
        CvInvoke.CvtColor(frame, gray, ColorConversion.Bgr2Gray);
    }
    else
    {
        gray = frame.Clone();
    }
    // add median blurring (kernel size forced to the next odd number)
    Mat blured = new Mat();
    int blurSize = (int)Properties.Settings.Default.BlurSize;
    if (blurSize > 0)
    {
        blurSize += (blurSize + 1) % 2;
        CvInvoke.MedianBlur(gray, blured, blurSize);
    }
    else
    {
        blured = gray.Clone();
    }
    gray.Dispose();
    // detect edges
    Mat cannyEdges = new Mat();
    CvInvoke.Canny(blured, cannyEdges, (double)Properties.Settings.Default.CannyThreshold1, (double)Properties.Settings.Default.CannyThreshold2);
    blured.Dispose();
    // find contours
    using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
    {
        CvInvoke.FindContours(cannyEdges, contours, null, RetrType.List, ChainApproxMethod.ChainApproxSimple);
        for (int i = 0; i < contours.Size; i++)
        {
            using (VectorOfPoint contour = contours[i])
                using (VectorOfPoint approxContour = new VectorOfPoint())
                {
                    // find polygon
                    CvInvoke.ApproxPolyDP(contour, approxContour, CvInvoke.ArcLength(contour, true) * (double)Properties.Settings.Default.ApproxPolyEpsilon, true);
                    // check minimal size
                    if (CvInvoke.ContourArea(approxContour, false) > 100)
                    {
                        // check if polygon is convex: reject when any exterior
                        // angle exceeds 170 degrees
                        bool isConvex = true;
                        LineSegment2D[] edges = PointCollection.PolyLine(approxContour.ToArray(), true);
                        for (int j = 0; j < edges.Length; j++)
                        {
                            double angle = Math.Abs(edges[(j + 1) % edges.Length].GetExteriorAngleDegree(edges[j]));
                            if (angle > 170)
                            {
                                isConvex = false;
                                break;
                            }
                        }
                        if (isConvex)
                        {
                            // draw marker outline for tuning overlays
                            if (IsTuning)
                            {
                                CvInvoke.Polylines(cannyEdges, approxContour, true, new MCvScalar(255), 2);
                            }
                            // compute center point
                            RotatedRect rect = CvInvoke.MinAreaRect(approxContour);
                            Point p = new Point((int)rect.Center.X, (int)rect.Center.Y);
                            // take the nearest marker from the center
                            double d = CalcDistance(frameCenter, p);
                            if (location.IsEmpty || d < distanceFromFrameCenter)
                            {
                                location = p;
                                distanceFromFrameCenter = d;
                                boundingRect = rect.MinAreaRect();
                            }
                        }
                    }
                }
        }
    }
    // show detection
    // NOTE(review): cannyEdges is handed to the picture box and then disposed
    // immediately below — confirm the control copies the image, otherwise it
    // will render a disposed Mat.
    if (IsTuning)
    {
        ImgBoxDiff.Image = cannyEdges;
    }
    cannyEdges.Dispose();
    return(location);
}
/// <summary>
/// Finds the widest rectangle in an image and crops it out: downscales large
/// images, edge-detects, collects 4-vertex contours whose corners are close to
/// right angles, scales the widest such box back to the original resolution,
/// and saves both the annotated original and the cropped rectangle to disk.
/// </summary>
/// <param name="imagePath">Path of the source image file.</param>
public void CutRectangleImage(string imagePath)
{
    Image <Bgr, Byte> src = new Image <Bgr, byte>(imagePath);
    // Downscale factor chosen by source width to keep processing fast.
    int scale = 1;
    if (src.Width > 500)
    {
        scale = 2;
    }
    if (src.Width > 1000)
    {
        scale = 10;
    }
    if (src.Width > 10000)
    {
        scale = 100;
    }
    var size = new Size(src.Width / scale, src.Height / scale);
    Image <Bgr, Byte> srcNewSize = new Image <Bgr, byte>(size);
    CvInvoke.Resize(src, srcNewSize, size);
    // Convert the image to grayscale.
    UMat grayImage = new UMat();
    CvInvoke.CvtColor(srcNewSize, grayImage, ColorConversion.Bgr2Gray);
    // Remove noise with a Gaussian blur.
    CvInvoke.GaussianBlur(grayImage, grayImage, new Size(3, 3), 3);
    UMat cannyEdges = new UMat();
    CvInvoke.Canny(grayImage, cannyEdges, 60, 180); // edge-detect, then extract contours
    #region 取三角形和矩形的顶点坐标
    List <Triangle2DF> triangleList = new List <Triangle2DF>();
    List <RotatedRect> boxList = new List <RotatedRect>(); // rotated rectangle candidates
    using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
    {
        CvInvoke.FindContours(cannyEdges, contours, null, RetrType.List, ChainApproxMethod.ChainApproxSimple);
        int count = contours.Size;
        for (int i = 0; i < count; i++)
        {
            using (VectorOfPoint contour = contours[i])
                using (VectorOfPoint approxContour = new VectorOfPoint())
                {
                    CvInvoke.ApproxPolyDP(contour, approxContour, CvInvoke.ArcLength(contour, true) * 0.08, true);
                    // Only consider contours with area greater than 50.
                    if (CvInvoke.ContourArea(approxContour, false) > 50)
                    {
                        if (approxContour.Size == 3) // contour has 3 vertices: triangle
                        {
                            System.Drawing.Point[] pts = approxContour.ToArray();
                            triangleList.Add(new Triangle2DF(pts[0], pts[1], pts[2]));
                        }
                        else if (approxContour.Size == 4) // contour has 4 vertices
                        {
                            #region 检测角度,如果角度都在 [80, 100] 之间,则为矩形
                            // It is a rectangle when every corner angle is in [80, 100].
                            bool isRectangle = true;
                            System.Drawing.Point[] pts = approxContour.ToArray();
                            LineSegment2D[] edges = Emgu.CV.PointCollection.PolyLine(pts, true);
                            for (int j = 0; j < edges.Length; j++)
                            {
                                double angle = Math.Abs(edges[(j + 1) % edges.Length].GetExteriorAngleDegree(edges[j]));
                                if (angle < 80 || angle > 100)
                                {
                                    isRectangle = false;
                                    break;
                                }
                            }
                            #endregion
                            if (isRectangle)
                            {
                                boxList.Add(CvInvoke.MinAreaRect(approxContour));
                            }
                        }
                    }
                }
        }
    }
    #endregion
    #region 保存剪切的最大的矩形图片
    Rectangle rectangle = new Rectangle(0, 0, src.Width, src.Height);
    int maxWidth = 0;
    for (int i = 0; i < boxList.Count(); i++)
    {
        RotatedRect box = boxList[i];
        Rectangle rectangleTemp = box.MinAreaRect();
        // Scale back to original resolution and widen by one scale step: the
        // rectangle may be slightly rotated, and without rotating the crop the
        // extra margin ensures the full shape is captured.
        rectangleTemp = new Rectangle(rectangleTemp.X * scale, rectangleTemp.Y * scale, rectangleTemp.Width * scale + scale, rectangleTemp.Height * scale + scale);
        // Keep the widest rectangle.
        if (rectangleTemp.Width > maxWidth)
        {
            maxWidth = rectangleTemp.Width;
            rectangle = rectangleTemp;
        }
    }
    src.Draw(rectangle, new Bgr(System.Drawing.Color.Red), 4); // draw the box on the image
    CvInvoke.Imwrite("原始图片.bmp", src);                      // save the original (annotated) image
    CvInvoke.cvSetImageROI(src.Ptr, rectangle);                // set the region of interest (ROI)
    var clone = src.Clone();
    CvInvoke.Imwrite("剪切的矩形图片.bmp", clone);              // save the cropped result
    #endregion
    src.Dispose();
    srcNewSize.Dispose();
    grayImage.Dispose();
}
/// <summary>
/// Classifies the largest contour (via Hu moments + SVM) as reject, hand, or
/// arm, and returns the region of interest accordingly: empty for reject, the
/// contour's bounding box for a hand, and — for an arm — the sub-region above
/// the deepest convexity defect that re-classifies as a hand.
/// </summary>
/// <returns>The ROI rectangle, or Rectangle.Empty when no hand is found.</returns>
private Rectangle LocateROI()
{
    int prediction = -1;
    VectorOfPoint contourOfInterest = new VectorOfPoint();
    int index = 0;
    index = ImgProc.LargestContourIndex(_contour);
    contourOfInterest = _contour[index];
    // Hu moments are the SVM's shape feature vector.
    MCvMoments moment = CvInvoke.Moments(contourOfInterest);
    double[] huMoment = moment.GetHuMoment();
    prediction = _svm.Compute(huMoment);
    if (prediction == CLASSIFICATION_REJECT)
    {
        return(Rectangle.Empty);
    }
    else if (prediction == CLASSIFICATION_HAND)
    {
        // Whole contour is a hand: its bounding box is the ROI.
        return(CvInvoke.MinAreaRect(contourOfInterest).MinAreaRect());
    }
    else if (prediction == CLASSIFICATION_ARM)
    {
        // Arm detected: use convexity defects (wrist notches) to split the hand
        // portion off the arm.
        Mat convexityDefect = new Mat();
        VectorOfInt hull = new VectorOfInt();
        CvInvoke.ConvexHull(contourOfInterest, hull, false, false);
        CvInvoke.ConvexityDefects(contourOfInterest, hull, convexityDefect);
        RotatedRect rectRot = CvInvoke.MinAreaRect(contourOfInterest);
        ModifiedRotatedRect rotRectMod = new ModifiedRotatedRect(rectRot);
        int yDel = 0;
        double ptLftToRight = Geometry.Distance(rotRectMod.Pul, rotRectMod.Pur);
        double ptUpToDown = Geometry.Distance(rotRectMod.Pul, rotRectMod.Pll);
        if (!convexityDefect.IsEmpty)
        {
            // Copy the defect matrix so the index triples can be read.
            Matrix <int> convex = new Matrix <int>(convexityDefect.Rows, convexityDefect.Cols, convexityDefect.NumberOfChannels);
            convexityDefect.CopyTo(convex);
            List <Point> contourTmp = new List <Point>();
            for (int i = 0; i < contourOfInterest.Size; i++)
            {
                contourTmp.Add(contourOfInterest[i]);
            }
            List <ConvexDefects> convexDefectList = new List <ConvexDefects>();
            for (int i = 0; i < convex.Rows; i++)
            {
                // do not touch
                int startIdx = convex.Data[i, 0];
                int endIdx = convex.Data[i, 1];
                int pointIdx = convex.Data[i, 2];
                Point startPt = contourOfInterest[startIdx];
                Point endPt = contourOfInterest[endIdx];
                Point defectPt = contourOfInterest[pointIdx];
                // do not touch
                convexDefectList.Add(new ConvexDefects(startPt, endPt, defectPt));
            }
            // Only handle the portrait orientation (hand above the arm).
            // NOTE(review): convexDefectList[0]/[1] assume at least two defects
            // exist when the Mat is non-empty — confirm upstream guarantees this.
            if (ptLftToRight <= ptUpToDown)
            {
                Point pc1Tmp = convexDefectList[0].DefectPt;
                Point pc2Tmp = convexDefectList[1].DefectPt;
                // The lower of the two deepest defects marks the wrist line.
                Point pc = pc1Tmp.Y > pc2Tmp.Y ? pc1Tmp : pc2Tmp;
                Point ptUpLeft = rotRectMod.Pul;
                Point ptUpRight = rotRectMod.Pur;
                Point ptLowLeft = rotRectMod.Pll;
                Point ptLowRight = rotRectMod.Plr;
                // Cut the rotated rect at the wrist point, both ways.
                ModifiedRotatedRect rotRectEval1 = ModifiedRotatedRect.Cut(ptUpLeft, ptUpRight, ptLowLeft, ptLowRight, pc);
                ModifiedRotatedRect rotRectEval2 = ModifiedRotatedRect.Cut(ptUpLeft, ptUpRight, ptLowLeft, ptLowRight, pc, true);
                Size sizeFrame = ImageInput.Size;
                Rectangle rectROIEval1 = rotRectEval1.ToRect(sizeFrame);
                Rectangle rectROIEval2 = rotRectEval2.ToRect(sizeFrame);
                // Re-classify the first sub-region to verify it is a hand.
                Mat cloneMat1 = ImageInput.Clone().Mat;
                Mat matToBeEval1 = new Mat(cloneMat1, rectROIEval1);
                VectorOfVectorOfPoint contoursEval1 = new VectorOfVectorOfPoint();
                Mat matHierachyEval1 = new Mat();
                CvInvoke.FindContours(matToBeEval1, contoursEval1, matHierachyEval1, Emgu.CV.CvEnum.RetrType.External, Emgu.CV.CvEnum.ChainApproxMethod.ChainApproxTc89L1);
                int largestContourIndexEval1 = ImgProc.LargestContourIndex(contoursEval1);
                MCvMoments momentEval1 = CvInvoke.Moments(contoursEval1[largestContourIndexEval1]);
                double[] huMomentsEval1 = momentEval1.GetHuMoment();
                double[] featureVectorSearch = ScaleValues(huMomentsEval1, 5000.0);
                int predictionEval1 = _svm.Compute(featureVectorSearch, MulticlassComputeMethod.Elimination);
                if (predictionEval1 == CLASSIFICATION_HAND)
                {
                    // Translate the sub-region's local box back into frame coords.
                    Rectangle rectRotRect = rectRot.MinAreaRect();
                    Rectangle init = CvInvoke.MinAreaRect(contoursEval1[largestContourIndexEval1]).MinAreaRect();
                    Point final = new Point(rectRotRect.X + init.X, rectRotRect.Y + init.Y);
                    return(new Rectangle(final, init.Size));
                }
                else
                {
                    return(Rectangle.Empty);
                }
            }
            else
            {
                return(Rectangle.Empty);
            }
        }
        else
        {
            return(Rectangle.Empty);
        }
    }
    else
    {
        return(Rectangle.Empty);
    }
}
/// <summary>
/// Determines whether the contour represents a card and, if so, produces a
/// rotation-corrected crop of the card region.
/// </summary>
/// <param name="cardId">Identifier of the card being processed (not used by the match itself).</param>
/// <param name="fieldImageSize">Size of the full field image the contour was found in.</param>
/// <param name="countor">Contour to test.</param>
/// <param name="result">On success, the cropped card image (caller disposes); otherwise null.</param>
/// <returns>True when the contour passes the card size/aspect-ratio heuristics.</returns>
public bool TryFindCard(Guid cardId, Size fieldImageSize, VectorOfPoint countor, out Mat result)
{
    result = null;

    RotatedRect rotatedRect = CvInvoke.MinAreaRect(countor);

    // Prevent divide by zero in the ratio computations below.
    // (The original checked Width twice; one combined guard is sufficient.)
    if ((rotatedRect.Size.Width == 0) || (rotatedRect.Size.Height == 0))
    {
        return false;
    }

    float angle = 0F;
    float width = rotatedRect.Size.Width;
    float height = rotatedRect.Size.Height;
    float area = width * height;
    float heightRatio = height / (float)fieldImageSize.Height;
    float widthRatio = width / (float)fieldImageSize.Width;
    float relativeCenterX = rotatedRect.Center.X / (float)fieldImageSize.Width;
    float relativeCenterY = rotatedRect.Center.Y / (float)fieldImageSize.Height;

    float aspectRatio = width / height;

    // Rotate card if it is on its side: normalize the aspect ratio and add a
    // -90 degree correction to the deskew rotation applied below.
    if (width > height)
    {
        aspectRatio = height / width;
        angle = -90.0F;
    }

    // Too small to parse (also covers the original "card should have a height"
    // check, since 1.0F < 500.0F).
    if ((height < 500.0F) || (width < 500.0F))
    {
        return false;
    }

    Debug.WriteLine("Potential Card Contour - Center: {0}/{1} Relative Center: ({9:0.00}%)/({10:0.00}%) Width: {2} ({11:0.00}%) Height: {3} ({12:0.00}%) Area: {4} : AspectRatio: {5}, Angle: {6} Image Size: {7}/{8}",
                    rotatedRect.Center.X, rotatedRect.Center.Y, rotatedRect.Size.Width, rotatedRect.Size.Height, area, aspectRatio, rotatedRect.Angle,
                    fieldImageSize.Width, fieldImageSize.Height, relativeCenterX * 100.0F, relativeCenterY * 100.0F, widthRatio * 100.0F, heightRatio * 100.0F);

    // Find the Card Aspect Ratio
    if (aspectRatio < MinCardAspectRatio || aspectRatio > MaxCardAspectRatio)
    {
        return false;
    }

    using (Mat image = GetImage())
    using (Mat rot_mat = new RotationMatrix2D(rotatedRect.Center, rotatedRect.Angle + angle, 1.0))
    using (Mat rotated = new Mat())
    {
        // Rotate the whole image so the card becomes axis-aligned.
        CvInvoke.WarpAffine(image, rotated, rot_mat, image.Size, interpMethod: Inter.Cubic);

        // Swap width/height when the combined rotation went past -90 degrees.
        Size size;
        if (rotatedRect.Angle + angle < -90)
        {
            size = new Size((int)rotatedRect.Size.Height, (int)rotatedRect.Size.Width);
        }
        else
        {
            size = new Size((int)rotatedRect.Size.Width, (int)rotatedRect.Size.Height);
        }

        using (Mat cropped = new Mat())
        {
            CvInvoke.GetRectSubPix(rotated, size, rotatedRect.Center, cropped);
            result = cropped.Clone();
            return true;
        }
    }
}
/// <summary>
/// Straightens (deskews) a text image: thresholds and dilates a negative copy
/// to merge text into blocks, estimates the dominant block angle from their
/// min-area rectangles, then rotates a margin-padded copy of the original by
/// that angle. Returns null when no contours are found.
/// https://becominghuman.ai/how-to-automatically-deskew-straighten-a-text-image-using-opencv-a0c30aed83df
/// </summary>
/// <param name="image">Source image.</param>
/// <param name="structuringElementSize">Kernel size used to dilate text into blocks.</param>
/// <param name="contourMaxCount">How many of the widest blocks vote on the angle (1 = just the lengthiest).</param>
/// <param name="angleMaxDeviation">Max angle difference for two blocks to be grouped together.</param>
/// <param name="margin">Padding added around the image before rotation.</param>
/// <param name="marginRgb">Fill color for the padding and rotation border.</param>
/// <returns>The deskewed, padded image, or null if nothing was detected.</returns>
static Image<Rgb, byte> deskew(Image<Rgb, byte> image, Size structuringElementSize, int contourMaxCount, double angleMaxDeviation, Size margin, Rgb marginRgb) //good
{
    // NOTE(review): image2, se, hierarchy and contours are never disposed —
    // candidate resource leaks on every call.
    Image<Gray, byte> image2 = image.Convert<Gray, byte>();
    CvInvoke.BitwiseNot(image2, image2);//to negative
    //CvInvoke.GaussianBlur(image2, image2, new Size((int)(9f / Settings.Constants.Pdf2ImageResolutionRatio), (int)(9f / Settings.Constants.Pdf2ImageResolutionRatio)), 0);//remove small spots
    CvInvoke.Threshold(image2, image2, 125, 255, ThresholdType.Otsu | ThresholdType.Binary);
    // Dilate so adjacent glyphs fuse into solid text blocks.
    Mat se = CvInvoke.GetStructuringElement(ElementShape.Rectangle, structuringElementSize, new Point(-1, -1));
    CvInvoke.Dilate(image2, image2, se, new Point(-1, -1), 1, BorderType.Constant, CvInvoke.MorphologyDefaultBorderValue);
    //Emgu.CV.CvInvoke.Erode(image, image, null, new Point(-1, -1), 1, BorderType.Constant, CvInvoke.MorphologyDefaultBorderValue);

    // Build the output canvas: original image centered inside a colored margin.
    Image<Rgb, byte> image3 = new Image<Rgb, byte>(image.Width + 2 * margin.Width, image.Height + 2 * margin.Height, marginRgb);
    image3.ROI = new Rectangle(new Point(margin.Width, margin.Height), image.Size);
    image.CopyTo(image3);
    image3.ROI = Rectangle.Empty;

    VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint();
    Mat hierarchy = new Mat();
    CvInvoke.FindContours(image2, contours, hierarchy, RetrType.List, ChainApproxMethod.ChainApproxSimple);
    if (contours.Size < 1)
    {
        return(null);
    }
    double angle = 0;
    //when contourMaxCount == 1, it just looks by the most lengthy block
    // Collect (normalized angle, longest side) per text block.
    List<(double angle, int w)> cs = new List<(double angle, int w)>();
    for (int i = 0; i < contours.Size; i++)
    {
        RotatedRect rr = CvInvoke.MinAreaRect(contours[i]);
        Rectangle r = rr.MinAreaRect();
        int w = r.Width > r.Height ? r.Width : r.Height;
        double a = rr.Angle;
        // Fold OpenCV's rotated-rect angle into (-45, 45] so near-horizontal
        // and near-vertical blocks report comparable skew.
        if (a > 45)
        {
            a -= 90;
        }
        else if (a < -45)
        {
            a += 90;
        }
        cs.Add((angle: a, w: w));
    }
    // Keep only the contourMaxCount widest blocks, sorted by angle.
    cs = cs.OrderByDescending(a => a.w).Take(contourMaxCount).OrderBy(a => a.angle).ToList();
    if (cs.Count < 1)
    {
        angle = 0;
    }
    else if (cs.Count < 2)//use the most lengthy block
    {
        angle = cs[0].angle;
    }
    else
    {
        // Group consecutive (angle-sorted) blocks whose angles are within
        // angleMaxDeviation of their predecessor; each group stores the
        // indices of its members except the first one.
        List<List<int>> dss = new List<List<int>>();
        List<int> ds = new List<int>();
        for (int i = 1; i < cs.Count; i++)
        {
            if (Math.Abs(cs[i].angle - cs[i - 1].angle) < angleMaxDeviation)
            {
                ds.Add(i);
            }
            else
            {
                dss.Add(ds);
                ds = new List<int>();
            }
        }
        dss.Add(ds);
        // Use the largest group; average over its members plus the implicit
        // first member (index ds[0] - 1, safe because indices start at 1).
        ds = dss.OrderByDescending(a => a.Count).FirstOrDefault();
        if (ds.Count < 1)
        {
            angle = 0;
        }
        else
        {
            // angle = as_[ds.OrderBy(a => Math.Abs(as_[a].angle - as_[a - 1].angle)).FirstOrDefault()].angle;
            angle = (cs[ds[0] - 1].angle + ds.Sum(a => cs[a].angle)) / (1 + ds.Count);
        }
    }
    if (angle == 0)
    {
        return(image3);
    }
    // Rotate the padded canvas about its center by the estimated skew angle.
    RotationMatrix2D rotationMat = new RotationMatrix2D();
    CvInvoke.GetRotationMatrix2D(new PointF((float)image3.Width / 2, (float)image3.Height / 2), angle, 1, rotationMat);
    //image3.ROI = new Rectangle(new Point(offset.Width, offset.Height), image.Size);
    CvInvoke.WarpAffine(image3, image3, rotationMat, image3.Size, borderValue: marginRgb.MCvScalar);
    return(image3);
}
//public IEnumerable<Mat> FindSetIcons() //{ // List<Mat> results = new List<Mat>(); // using (var contours = new VectorOfVectorOfPoint()) // { // foreach (var angle in CardImage.IconAngles()) // { // foreach (var cannyParameter in CardImage.CannyIconParameters()) // { // Debug.WriteLine("Find Set Icon Image Angle: {0} Canny: {1} ...", angle, cannyParameter); // using (Mat cannyImage = GetCannyImage(() => { return GetGreyImage(angle); }, angle, cannyParameter)) // { // // Find All the Contours // CvInvoke.FindContours(cannyImage, contours, hierarchy: null, mode: RetrType.List, method: ChainApproxMethod.ChainApproxNone); // List<Rectangle> rectangleList = new List<Rectangle>(); // // Create a List of Rectangles From the Contours // for (int idx = 0; idx < contours.Size; idx++) // { // Rectangle rectangle = CvInvoke.MinAreaRect(contours[idx]).MinAreaRect(); // rectangleList.Add(rectangle); // } // // Only add those which fall in the Icon "Zone" // rectangleList = rectangleList.Where(r => CardExpansionSymbolFilter.ExpansionSymbolFilterPass1(_cardFrame, cannyImage.Size, r)).ToList(); // // Output Contoured Image For Debugging // this.FireImageEvent(null, _cardId, _cardId, ImageType.CardContoured, angle: angle, X: 0, Y: 0, // rectangles: rectangleList, color: blue, thickness: 1, cannyParameter: cannyParameter, postFunc: new Action<Mat>((image) => // { // // After Drawing the Rectangles, Add The Margin Lines // CvInvoke.Line(image, new Point((int)(image.Width * CardExpansionSymbolFilter.LeftMargin[_cardFrame]), 0), // new Point((int)(image.Width * CardExpansionSymbolFilter.LeftMargin[_cardFrame]), image.Height), white, thickness: 1); // CvInvoke.Line(image, new Point((int)(image.Width * CardExpansionSymbolFilter.RightMargin[_cardFrame]), 0), // new Point((int)(image.Width * CardExpansionSymbolFilter.RightMargin[_cardFrame]), image.Height), white, thickness: 1); // CvInvoke.Line(image, new Point(0, (int)(image.Height * CardExpansionSymbolFilter.TopMargin[_cardFrame])), // 
new Point(image.Width, (int)(image.Height * CardExpansionSymbolFilter.TopMargin[_cardFrame])), white, thickness: 1); // CvInvoke.Line(image, new Point(0, (int)(image.Height * CardExpansionSymbolFilter.BottomMargin[_cardFrame])), // new Point(image.Width, (int)(image.Height * CardExpansionSymbolFilter.BottomMargin[_cardFrame])), white, thickness: 1); // })); // // Merge all possible rectangles that could be icons together // var intersectedRectangles = MergedRectangles(rectangleList.ToArray()) // .Distinct() // .Except(rectangleList); // rectangleList.AddRange(intersectedRectangles); // // Make a second pass at filtering to reduce the rectangles into only those // // that could possibly be icons based on aspect ratio, size, etc... // rectangleList = rectangleList.Where(r => CardExpansionSymbolFilter.ExpansionSymbolFilterPass2(_cardFrame, cannyImage.Size, r)).ToList(); // // Sort Each Rectangle From Biggest To Smallest // rectangleList.Sort((r1, r2) => // { // float area1 = r1.Width * r1.Height; // float area2 = r2.Width * r2.Height; // return area2.CompareTo(area1); // }); // // Skip mat creation if there are no rectangles // if (rectangleList.Count > 0) // { // using (Mat image = GetGreyImage(angle)) // { // foreach (Rectangle rectangle in rectangleList) // { // using (Mat cropped = new Mat(image, rectangle)) // { // // Output cropped image for debugging. 
//                Image.FireImageEvent(this, _cardId, _cardId, ImageType.SetIcon,
//                    cropped, angle: angle, X: rectangle.X + (rectangle.Width / 2), Y: rectangle.Y + (rectangle.Height / 2),
//                    cannyParameter: cannyParameter);
//                results.Add(cropped.Clone());
//              }
//            }
//          }
//        }
//      }
//    }
//  }
//  // Sort to biggest first, this make sure FirstDefault() take the most likely one
//  results.Sort((m1, m2) =>
//  {
//    float area1 = m1.Size.Height * m1.Size.Width;
//    float area2 = m2.Size.Height * m2.Size.Width;
//    return area2.CompareTo(area1);
//  });
//  return results;
//}

/// <summary>
/// Try to find the title bar in the card image. Handles two layouts: modern
/// (M15-style) cards whose boxed name bar sits in the top 15% of the card, and
/// original-frame cards where the title is inferred from the region directly
/// above the art box.
/// </summary>
/// <param name="cardId">Identifier of the card being processed.</param>
/// <param name="cardTitleId">Identifier used when publishing the cropped title image.</param>
/// <param name="cardImage">Card image to search.</param>
/// <param name="angle">Rotation angle used when the source image was prepared (passed through to events).</param>
/// <param name="cannyParameter">Canny parameters used upstream (passed through to events).</param>
/// <param name="countor">Contour to test.</param>
/// <param name="result">Resulting title image (caller disposes), or null.</param>
/// <param name="cardTitleType">Detected card frame type; always left as M15 here.</param>
/// <returns>True if a title region is found.</returns>
private static bool TryFindTitle(Guid cardId, Guid cardTitleId, Mat cardImage, double angle, CannyParam cannyParameter, VectorOfPoint countor, out Mat result, out MTGCardFrame cardTitleType)
{
    result = null;
    cardTitleType = MTGCardFrame.M15;

    RotatedRect rotatedRect = CvInvoke.MinAreaRect(countor);

    // Prevent Divide By Zero
    if (rotatedRect.Size.Height == 0)
    {
        return(false);
    }

    float width = rotatedRect.Size.Width; // NOTE(review): unused local.
    float height = rotatedRect.Size.Height;
    float heightRatio = rotatedRect.Size.Height / (float)cardImage.Size.Height;
    float relativeCenterY = rotatedRect.Center.Y / (float)cardImage.Size.Height;

    // All remaining ratios are computed from the axis-aligned bounding box.
    Rectangle box = rotatedRect.MinAreaRect();

    // Prevent Divide By Zero
    if (box.Size.Width == 0)
    {
        return(false);
    }

    float widthRatio = (float)box.Size.Width / (float)cardImage.Size.Width;
    float aspectRatio = (float)box.Size.Height / (float)box.Size.Width;
    float area = (float)box.Size.Height * (float)box.Size.Width;
    float imageArea = (float)cardImage.Size.Height * (float)cardImage.Size.Width;
    float relativeArea = area / imageArea;

    // Title bar should have a height
    if (height < 1.0F)
    {
        return(false);
    }

    // Box Should Be Inside the Image
    if ((box.Y < 0) || (box.X < 0) || ((box.X + box.Width) > cardImage.Size.Width) || ((box.Y + box.Height) > cardImage.Size.Height))
    {
        return(false);
    }

    // Name bar should center in the top 15% of the image, this is the new style cards with the name "boxed" in the image
    if (relativeCenterY < .15F)
    {
        // Title Bar Should Be Wider Than 80% of the Image Width
        if (widthRatio < .80F)
        {
            return(false);
        }

        // Publish the candidate crop for debugging before the height filter.
        // NOTE(review): fires with (cardId, cardId); the accepted crop below
        // fires again with (cardId, cardTitleId) — confirm both are intended.
        using (Mat cropped = new Mat(cardImage, box))
        {
            Image.FireImageEvent(null, cardId, cardId, ImageType.TitleCropped, cropped, angle: angle, X: box.X, Y: box.Y, cannyParameter: cannyParameter);
        }

        // Title Bar Should Be 6% of the Card Height
        if (heightRatio < .048 || heightRatio > .077)
        {
            return(false);
        }

        Debug.WriteLine("Title Contour ({14}) - Center: {0}/{1} Relative Center: ({9}%)/({10}%) Width: {2} ({11}%) Height: {3} ({12}%) Area: {4} ({13}%) : AspectRatio: {5}, Angle: {6} Image Size: {7}/{8}",
                        rotatedRect.Center.X, rotatedRect.Center.Y, rotatedRect.Size.Width, rotatedRect.Size.Height, area, aspectRatio, rotatedRect.Angle,
                        cardImage.Size.Width, cardImage.Size.Height, (rotatedRect.Center.X / cardImage.Size.Width) * 100.0, relativeCenterY * 100.0,
                        widthRatio * 100.0, heightRatio * 100.0, relativeArea * 100.0, cardTitleId);

        using (Mat cropped = new Mat(cardImage, box))
        {
            Image.FireImageEvent(null, cardId, cardTitleId, ImageType.TitleCropped, cropped, angle: angle, X: box.X, Y: box.Y, cannyParameter: cannyParameter);
            result = cropped.Clone();
            return(true);
        }
    }
    else if (relativeCenterY < .50F)
    {
        // Assume that this card is the older style card with the name above the photo, but not "boxed"
        // cardTitleType = MTGCardFrame.Original;

        // Using the Aspect Ratio Find the Art Box
        if (aspectRatio < .75F || aspectRatio > .85F)
        {
            return(false);
        }

        // The art relative area to the card should be above 35%
        if (relativeArea < .35F)
        {
            return(false);
        }

        Debug.WriteLine("Title Contour ({14}) - Center: {0}/{1} Relative Center: ({9}%)/({10}%) Width: {2} ({11}%) Height: {3} ({12}%) Area: {4} ({13}%) : AspectRatio: {5}, Angle: {6} Image Size: {7}/{8}",
                        rotatedRect.Center.X, rotatedRect.Center.Y, rotatedRect.Size.Width, rotatedRect.Size.Height, area, aspectRatio, rotatedRect.Angle,
                        cardImage.Size.Width, cardImage.Size.Height, (rotatedRect.Center.X / cardImage.Size.Width) * 100.0, relativeCenterY * 100.0,
                        widthRatio * 100.0, heightRatio * 100.0, relativeArea * 100.0, cardTitleId);

        int borderHeight = (int)((double)box.Y * .45);

        // Create a box that is as wide as the art, and directly above the art to
        // the top of the card
        Rectangle titleBox = new Rectangle(box.X, borderHeight, box.Width, box.Y - borderHeight);
        heightRatio = titleBox.Size.Height / (float)cardImage.Size.Height;

        // Title Bar Should Be 6% of the Card Height
        if (heightRatio < .050 || heightRatio > .065)
        {
            return(false);
        }

        using (Mat cropped = new Mat(cardImage, titleBox))
        {
            Image.FireImageEvent(null, cardId, cardId, ImageType.TitleCropped, cropped, angle: angle, X: titleBox.X, Y: titleBox.Y, cannyParameter: cannyParameter);
            result = cropped.Clone();
            return(true);
        }
    }
    return(false);
}
/// <summary>
/// Detects roughly rectangular contours in a BGR image and returns their
/// axis-aligned min-area bounding boxes. A contour qualifies when its area is
/// in [500, imageArea/5] and every exterior angle of its simplified polygon
/// lies within [85, 95] degrees.
/// </summary>
/// <param name="sourceImage">BGR input image.</param>
/// <returns>Bounding boxes of the detected rectangles (possibly empty).</returns>
public static IEnumerable<Rectangle> DetectSquares(Mat sourceImage)
{
    List<Rectangle> boxList = new List<Rectangle>();

    // The original also created an unused "destinationImage" Mat here; it was
    // dead code (and a leak) and has been removed.
    using (Mat greyscaleImage = new Mat())
    using (Mat detectedEdges = new Mat())
    using (Mat dilateKernel = new Mat())
    {
        CvInvoke.CvtColor(sourceImage, greyscaleImage, ColorConversion.Bgr2Gray);
        CvInvoke.GaussianBlur(greyscaleImage, detectedEdges, new Size(1, 1), 1);
        CvInvoke.Canny(detectedEdges, detectedEdges, Treshold, Treshold * 3);
        // Thicken edges so broken contours close up before contour extraction.
        CvInvoke.Dilate(detectedEdges, detectedEdges, dilateKernel, new Point(-1, -1), 3, BorderType.Default, new MCvScalar(255, 255, 255));

        using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
        {
            CvInvoke.FindContours(detectedEdges, contours, null, RetrType.List, ChainApproxMethod.ChainApproxSimple);

            int count = contours.Size;
            for (int i = 0; i < count; i++)
            {
                using (VectorOfPoint approxContour = new VectorOfPoint())
                using (VectorOfPoint approx = contours[i])
                {
                    // Simplify the contour, then test its polygon's angles.
                    CvInvoke.ApproxPolyDP(approx, approxContour, CvInvoke.ArcLength(approx, true) * 0.035, true);
                    Point[] pts = approxContour.ToArray();
                    LineSegment2D[] edges = PointCollection.PolyLine(pts, true);

                    double contourArea = CvInvoke.ContourArea(approxContour, true);
                    if (contourArea >= 500 && contourArea <= detectedEdges.Width * detectedEdges.Height / 5)
                    {
                        if (approxContour.Size >= 2)
                        {
                            // All exterior angles must be ~90 degrees.
                            bool isRectangle = true;
                            for (int j = 0; j < edges.Length; j++)
                            {
                                double angle = Math.Abs(edges[(j + 1) % edges.Length].GetExteriorAngleDegree(edges[j]));
                                if (angle < 85 || angle > 95)
                                {
                                    isRectangle = false;
                                    break;
                                }
                            }

                            if (isRectangle)
                            {
                                Rectangle minRectangle = CvInvoke.MinAreaRect(approxContour).MinAreaRect();
                                boxList.Add(minRectangle);
                            }
                        }
                    }
                }
            }
        }
    }

    return boxList;
}
/// <summary>
/// Segmentation pipeline for the image currently shown in pBxOgn:
/// grey-scale → Otsu mask → adaptive threshold → Canny → contours, then crops
/// the best-scoring contour's min-area box into pBxCanny for OCR.
/// </summary>
private void operation()
{
    // try
    // {
    if (pBxOgn.Image != null && i < path.Length)
    {
        //pBxCanny.Image = null;
        Image<Bgr, byte> img;
        if (!isCut)
        {
            img = new Image<Bgr, byte>((Bitmap)pBxOgn.Image);
        }
        else
        {
            img = PicCut;
        }
        if (img == null)
        {
            MessageBox.Show("img==null");
        }
        Image<Gray, byte> dst1 = new Image<Gray, byte>(img.Width, img.Height);
        Image<Gray, byte> dst2 = new Image<Gray, byte>(img.Width, img.Height);
        CvInvoke.CvtColor(img, dst1, Emgu.CV.CvEnum.ColorConversion.Bgr2Gray);
        // NOTE(review): Emgu's SmoothGaussian returns a NEW image; the return
        // value is discarded here, so dst1 is likely unchanged. If in-place
        // smoothing was intended, _SmoothGaussian is the in-place variant — confirm.
        dst1.SmoothGaussian(3);
        Image<Gray, byte> dst1th = dst1.CopyBlank();
        // Otsu chooses the global threshold; it is reused to derive Canny thresholds.
        double cannyAccThresh = CvInvoke.Threshold(dst1, dst1th, 0, 255, ThresholdType.Otsu | ThresholdType.Binary);
        double cannyThresh = 0.1 * cannyAccThresh;
        // Zero out every pixel rejected by the Otsu mask.
        for (int x = 0; x < dst1.Rows; x++)
        {
            for (int y = 0; y < dst1.Cols; y++)
            {
                if (dst1th.Data[x, y, 0] == 0)
                {
                    dst1.Data[x, y, 0] = 0;
                }
            }
        }
        Image<Gray, byte> dst4 = new Image<Gray, byte>(img.Width, img.Height);
        CvInvoke.AdaptiveThreshold(dst1, dst4, 255, AdaptiveThresholdType.GaussianC, ThresholdType.Binary, 41, 10);
        // NOTE(review): Dilate also returns a new image; this call's result is
        // discarded, so it has no effect on dst4 — confirm intent.
        dst4.Dilate(100);
        CvInvoke.Canny(dst4, dst2, 25, 75);
        //CvInvoke.Canny(dst4, dst2, cannyThresh, cannyAccThresh);
        pBxOgn.Image = dst4.ToBitmap();
        //dst4.Save(path[i]+".jpg");
        VectorOfVectorOfPoint con = new VectorOfVectorOfPoint();
        Image<Gray, byte> c = new Image<Gray, byte>(img.Width, img.Height);
        Image<Bgr, byte> dst3 = new Image<Bgr, byte>(img.Width, img.Height);
        CvInvoke.FindContours(dst2, con, c, RetrType.Ccomp, ChainApproxMethod.ChainApproxSimple);
        double areaM = dst2.Height * dst2.Width;
        double areaMin = 200;
        RotatedRect rRect = new RotatedRect();
        Image<Bgr, byte> d;
        int j = 0;
        // Select index j of the largest contour with area in (areaMin, areaM).
        // NOTE(review): this local i shadows the field i tested in the outer
        // condition; and Equals on the contour wrappers is reference equality —
        // confirm both are intended.
        for (int i = 0; i < con.Size; i++)
        {
            double area = CvInvoke.ContourArea(con[i]);
            if (area < areaM)
            {
                if (area > areaMin && !con[i].Equals(con[j]))
                {
                    areaMin = area;
                    j = i;
                }
            }
        }
        double gotArea = CvInvoke.ContourArea(con[j]);
        if (gotArea > 4000)
        {
            CvInvoke.CvtColor(dst4, dst3, ColorConversion.Gray2Bgr);
            //CvInvoke.DrawContours(img, con, i, new MCvScalar(0, 0, 255, 255), 2);
            rRect = CvInvoke.MinAreaRect(con[j]);
            PointF[] pt = CvInvoke.BoxPoints(rRect); // NOTE(review): unused.
            // Crop the min-area box out of the thresholded image via the
            // legacy cvSetImageROI/cvCopy API.
            CvInvoke.cvSetImageROI(dst3, rRect.MinAreaRect());
            d = new Image<Bgr, byte>(dst3.ROI.Width, dst3.ROI.Height);
            CvInvoke.cvCopy(dst3, d, IntPtr.Zero);
            if (d == null)
            {
                MessageBox.Show("没有生成图片!");
            }
            else
            {
                pBxCanny.Image = d.ToBitmap();
                rtbOCR.Text += ("\n!" + gotArea + "!\n");
                isCut = false;
            }
        }
        else
        {
            rtbOCR.Text += ("\n<" + gotArea + ">\n");
        }
    }
    else
    {
        MessageBox.Show("没有原图片!");
    }
}
/// <summary>
/// Finds quadrilateral contours in the image whose corners are all within
/// [80, 100] degrees, shrinks each detected box by 10% (offset by 5px), draws
/// the boxes on the image, and shows the edge map and the annotated image.
/// </summary>
/// <param name="image">BGR image; modified in place by the drawing pass.</param>
private static void DetectRectangles(Image<Bgr, byte> image)
{
    double cannyThresholdLinking = 3.0;
    double cannyThreshold = 100.0;

    List<Rectangle> boxList = new List<Rectangle>();

    // The original also ran HoughLinesP here and never used the result; that
    // dead computation has been removed.
    using (UMat uImage = new UMat())
    using (UMat pyrDown = new UMat())
    using (UMat cannyEdges = new UMat())
    {
        CvInvoke.CvtColor(image, uImage, ColorConversion.Bgr2Gray);

        // Down/up-sample once to suppress fine texture noise before Canny.
        CvInvoke.PyrDown(uImage, pyrDown);
        CvInvoke.PyrUp(pyrDown, uImage);

        CvInvoke.Canny(uImage, cannyEdges, cannyThreshold, cannyThresholdLinking);
        ImageViewer.Show(cannyEdges);

        using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
        {
            CvInvoke.FindContours(cannyEdges, contours, null, RetrType.List, ChainApproxMethod.ChainApproxSimple);

            int count = contours.Size;
            for (int i = 0; i < count; i++)
            {
                using (VectorOfPoint contour = contours[i])
                using (VectorOfPoint approxContour = new VectorOfPoint())
                {
                    CvInvoke.ApproxPolyDP(contour, approxContour, CvInvoke.ArcLength(contour, true) * 0.05, true);
                    if (CvInvoke.ContourArea(approxContour, true) > 150)
                    {
                        if (approxContour.Size == 4)
                        {
                            // All four exterior angles must be close to 90 degrees.
                            bool isRectangle = true;
                            Point[] pts = approxContour.ToArray();
                            LineSegment2D[] edges = PointCollection.PolyLine(pts, true);
                            for (int j = 0; j < edges.Length; j++)
                            {
                                double angle = Math.Abs(edges[(j + 1) % edges.Length].GetExteriorAngleDegree(edges[j]));
                                if (angle < 80 || angle > 100)
                                {
                                    isRectangle = false;
                                    break;
                                }
                            }

                            if (isRectangle)
                            {
                                Rectangle minRectangle = CvInvoke.MinAreaRect(approxContour).MinAreaRect();
                                // Shrink by ~10% and nudge inward to avoid the edge pixels.
                                int ninetyPercentWidth = minRectangle.Width - (int)(minRectangle.Width * 0.1);
                                int ninetyPercentHeight = minRectangle.Height - (int)(minRectangle.Height * 0.1);
                                minRectangle.Size = new Size(ninetyPercentWidth, ninetyPercentHeight);
                                minRectangle.Offset(5, 5);
                                boxList.Add(minRectangle);
                            }
                        }
                    }
                }
            }
        }
    }

    for (int i = 0; i < boxList.Count; i++)
    {
        image.Draw(boxList[i], new Bgr(Color.DarkOrange), 2);
    }

    ImageViewer.Show(image);
}
/// <summary>
/// Alternative segmentation pipeline for the image shown in pBxOgn:
/// grey-scale, contrast clipping, Otsu masking, adaptive threshold, Canny,
/// then picks the largest min-area rectangle from the polygonal contours and
/// crops that region out of a re-thresholded copy for OCR (ocrGet).
/// </summary>
private void pBxOgnOperation()
{
    // try
    // {
    if (pBxOgn.Image != null && i < path.Length)
    {
        //pBxCanny.Image = null;
        Image<Bgr, byte> img;
        if (!isCut)
        {
            img = new Image<Bgr, byte>((Bitmap)pBxOgn.Image);
        }
        else
        {
            img = PicCut;
        }
        if (img == null)
        {
            MessageBox.Show("img==null");
        }
        Image<Gray, byte> dst1 = new Image<Gray, byte>(img.Width, img.Height);
        UMat dst2 = new UMat();//Image<Gray, byte> dst2 = new Image<Gray, byte>(img.Width, img.Height);
        CvInvoke.CvtColor(img, dst1, Emgu.CV.CvEnum.ColorConversion.Bgr2Gray);
        // NOTE(review): Emgu's SmoothGaussian returns a NEW image; the result is
        // discarded here and below (dst1.SmoothGaussian(11), dst4.Dilate(100),
        // dst5.SmoothGaussian(31), dst5.Erode(5)), so those calls likely have
        // no effect — confirm whether the in-place variants were intended.
        dst1.SmoothGaussian(3);
        // Keep an untouched copy for the final crop stage.
        Image<Gray, byte> savedst1 = dst1.Copy();
        // Clip near-black pixels to 0 and near-white pixels to 255.
        for (int x = 0; x < dst1.Rows; x++)
        {
            //dst1.Data[x, dst1.Cols, 0] = 0;
            //dst1.Data[x, 0, 0] = 0;
            for (int y = 0; y < dst1.Cols; y++)
            {
                if (dst1.Data[x, y, 0] <= 50)
                {
                    dst1.Data[x, y, 0] = 0;
                }
                else if (dst1.Data[x, y, 0] >= 200)
                {
                    dst1.Data[x, y, 0] = 255;
                }
            }
        }
        dst1.SmoothGaussian(11);
        Image<Gray, byte> dst1th = dst1.CopyBlank();
        // Otsu picks the threshold; its value also drives the Canny thresholds.
        double cannyAccThresh = CvInvoke.Threshold(dst1, dst1th, 120, 255, ThresholdType.Otsu | ThresholdType.Binary);
        double cannyThresh = 0.1 * cannyAccThresh;
        // Zero out every pixel rejected by the Otsu mask.
        for (int x = 0; x < dst1.Rows; x++)
        {
            for (int y = 0; y < dst1.Cols; y++)
            {
                if (dst1th.Data[x, y, 0] == 0)
                {
                    dst1.Data[x, y, 0] = 0;
                }
            }
        }
        //CvInvoke.Canny(dst1, dst2, 75, 50);
        Image<Gray, byte> dst4 = new Image<Gray, byte>(img.Width, img.Height);
        CvInvoke.AdaptiveThreshold(dst1th, dst4, 255, AdaptiveThresholdType.GaussianC, ThresholdType.Binary, 55, 10);
        dst4.Dilate(100);
        CvInvoke.Canny(dst4, dst2, cannyThresh, cannyAccThresh);
        pBxOgn.Image = dst4.ToBitmap();
        // NOTE(review): lines is never used below (only by commented-out code).
        LineSegment2D[] lines = CvInvoke.HoughLinesP(dst2, 1, Math.PI / 90.0, 100, 1, 100);
        List<RotatedRect> boxlist = new List<RotatedRect>();
        using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
        {
            CvInvoke.FindContours(dst2, contours, null, RetrType.List, ChainApproxMethod.ChainApproxSimple);
            int count = contours.Size;
            for (int i = 0; i < count; i++)
            {
                using (VectorOfPoint contour = contours[i])
                {
                    using (VectorOfPoint approxContour = new VectorOfPoint())
                    {
                        CvInvoke.ApproxPolyDP(contour, approxContour, CvInvoke.ArcLength(contour, false) * 0.05, false);
                        // Keep polygons covering at least 0.1% of the image.
                        if (CvInvoke.ContourArea(approxContour, false) > img.Size.Height * img.Size.Width * 0.001)
                        {
                            if (approxContour.Size >= 4) //The contour has 4 vertices.
                            {
                                //#region determine if all the angles in the contour are within [80, 100] degree
                                //bool isRectangle = true;
                                //Point[] pts = approxContour.ToArray();
                                //LineSegment2D[] edges = PointCollection.PolyLine(pts, true);
                                //for (int j = 0; j < edges.Length; j++)
                                //{
                                //    double angle = Math.Abs(
                                //        edges[(j + 1) % edges.Length].GetExteriorAngleDegree(edges[j]));
                                //    if (angle < 80 || angle > 100)
                                //    {
                                //        isRectangle = false;
                                //        break;
                                //    }
                                //}
                                //#endregion
                                //if (isRectangle)
                                boxlist.Add(CvInvoke.MinAreaRect(approxContour));
                            }
                            else
                            {
                                rtbOCR.Text += ("no" + i + " ");
                            }
                        }
                    }
                }
            }
        }
        Image<Bgr, Byte> toDraw = dst4.CopyBlank().Convert<Bgr, Byte>().Not(); // NOTE(review): unused below.
        //Image<Bgr, Byte> toDraw2 = dst1.Copy().Convert<Bgr,Byte>().Not();
        // Select the candidate rectangle with the largest area (above 1000 px²).
        RotatedRect maxrect = new RotatedRect();
        double maxarea = 1000;
        Image<Gray, Byte> getLine = dst4.CopyBlank();
        foreach (RotatedRect rect in boxlist)
        {
            if (rect.Size.Width * rect.Size.Height > maxarea)
            {
                maxarea = rect.Size.Width * rect.Size.Height;
                maxrect = rect;
            }
            //toDraw.Draw(rect, new Bgr(Color.Red), 9);
        }
        //foreach (LineSegment2D line in lines)
        //{
        //    getLine.Draw(line, new Gray(255), 9);
        //    toDraw2.Draw(line, new Bgr(Color.Blue), 5);
        //}
        //getLine.Draw(maxrect, new Gray(255), 9);
        //pbx.Image = toDraw2.ToBitmap();
        //List<RotatedRect> boxlist2 = new List<RotatedRect>();
        //using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
        //{
        //    CvInvoke.FindContours(getLine, contours, null, RetrType.List, ChainApproxMethod.ChainApproxSimple);
        //    int count = contours.Size;
        //    for (int i = 0; i < count; i++)
        //    {
        //        using (VectorOfPoint contour = contours[i])
        //        {
        //            using (VectorOfPoint approxContour = new VectorOfPoint())
        //            {
        //                CvInvoke.ApproxPolyDP(contour, approxContour, CvInvoke.ArcLength(contour, false) * 0.05, false);
        //                if (CvInvoke.ContourArea(approxContour, false) > img.Size.Height * img.Size.Width * 0.001)
        //                    if (approxContour.Size >= 4) //The contour has 4 vertices.
        //                    {
        //                        boxlist2.Add(CvInvoke.MinAreaRect(approxContour));
        //                    }
        //                    else
        //                    {
        //                        rtbOCR.Text += ("no" + i + " ");
        //                    }
        //            }
        //        }
        //    }
        //}
        //RotatedRect maxrect2 = new RotatedRect();
        //maxarea = 1000;//img.Height * img.Width * 0.5 * 0.5;
        //foreach (RotatedRect rect in boxlist2)
        //{
        //    if (rect.Size.Width * rect.Size.Height > maxarea&&rect.Size.Width * rect.Size.Height<=img.Width*img.Height*0.99)
        //    {
        //        maxarea = rect.Size.Width * rect.Size.Height;
        //        maxrect2 = rect;
        //    }
        //    toDraw.Draw(rect, new Bgr(Color.Yellow), 9);
        //}
        //PointF[] vertical = maxrect.GetVertices();
        //toDraw.Draw(maxrect, new Bgr(Color.Red), 4);
        //toDraw.Draw(maxrect2, new Bgr(Color.Black), 4);
        // Re-threshold the saved (pre-clipping) grey image and crop the winning
        // rectangle out of it via the legacy ROI API, then run OCR.
        Image<Gray, byte> dst5 = new Image<Gray, byte>(img.Width, img.Height);
        CvInvoke.AdaptiveThreshold(savedst1, dst5, 255, AdaptiveThresholdType.GaussianC, ThresholdType.Binary, 77, 10);
        dst5.SmoothGaussian(31);
        dst5.Erode(5);
        pbx.Image = dst5.ToBitmap();
        //CvInvoke.AbsDiff(dst5, dst4, dst5);
        CvInvoke.cvSetImageROI(dst5, maxrect.MinAreaRect());
        Image<Gray, Byte> imgRoi = new Image<Gray, byte>(dst5.ROI.Width, dst5.ROI.Height);
        CvInvoke.cvCopy(dst5, imgRoi, IntPtr.Zero);
        pBxCanny.Image = imgRoi.ToBitmap();
        ocrGet();
    }
    else
    {
        MessageBox.Show("没有原图片!");
    }
}
/// <summary>
/// Locates up to 13 card-sized contours in the image (largest first),
/// perspective-warps each card face to an 800x800 upright image, and adds it
/// to the library with the value implied by its discovery order.
/// </summary>
/// <param name="image">Source photo containing the spread of cards.</param>
/// <param name="color">Suit/color tag stored on each created Card.</param>
private void loadLibrary(Mat image, int color)
{
    // Discovery order -> card value (same mapping as the original switch:
    // cards 1..4 -> 10..13, 5..9 -> 5..9, 10..13 -> 1..4).
    int[] cardValues = { 10, 11, 12, 13, 5, 6, 7, 8, 9, 1, 2, 3, 4 };

    using (Mat gray = new Mat())
    using (Mat blur = new Mat())
    using (Mat thresh = new Mat())
    using (Mat hierarchy = new Mat())
    using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
    {
        CvInvoke.CvtColor(image, gray, ColorConversion.Bgr2Gray);
        CvInvoke.GaussianBlur(gray, blur, new System.Drawing.Size(1, 1), 1000);
        CvInvoke.Threshold(blur, thresh, 200, 255, ThresholdType.Binary);
        CvInvoke.FindContours(thresh, contours, hierarchy, RetrType.Tree, ChainApproxMethod.ChainApproxNone);

        // Insertion-sort the contours into descending area order, swapping the
        // point data through a temporary vector (disposed per swap; the
        // original leaked one VectorOfPoint per swap).
        for (int i = 0; i < contours.Size; ++i)
        {
            int j = i;
            while (j > 0 && CvInvoke.ContourArea(contours[j - 1], false) < CvInvoke.ContourArea(contours[j], false))
            {
                using (VectorOfPoint tmp = new VectorOfPoint())
                {
                    tmp.Push(contours[j]);
                    contours[j].Clear();
                    contours[j].Push(contours[j - 1]);
                    contours[j - 1].Clear();
                    contours[j - 1].Push(tmp);
                }
                j--;
            }
        }

        List<RotatedRect> foundCards = new List<RotatedRect>();
        int numCards = 0;
        for (int i = 0; i < contours.Size && numCards < 13; ++i)
        {
            VectorOfPoint card = contours[i];
            double peri = CvInvoke.ArcLength(card, true);
            VectorOfPoint approx = new VectorOfPoint();
            CvInvoke.ApproxPolyDP(card, approx, 0.02 * peri, true);
            RotatedRect rect = CvInvoke.MinAreaRect(card);

            // Skip contours whose box contains the center of an already-found
            // card (nested/duplicate detections of the same card).
            bool stop = false;
            for (int j = 0; j < foundCards.Count; ++j)
            {
                System.Drawing.PointF center = foundCards[j].Center;
                Rectangle bounds = rect.MinAreaRect();
                if (bounds.Left < center.X && bounds.Right > center.X &&
                    bounds.Top < center.Y && bounds.Bottom > center.Y)
                {
                    stop = true;
                }
            }
            if (stop)
            {
                continue;
            }
            numCards++;

            // Destination corners of the 800x800 upright card image.
            System.Drawing.PointF[] points = new System.Drawing.PointF[4], points2 = new System.Drawing.PointF[4];
            points[0] = new System.Drawing.PointF(0, 0);
            points[1] = new System.Drawing.PointF(799, 0);
            points[2] = new System.Drawing.PointF(799, 799);
            points[3] = new System.Drawing.PointF(0, 799);

            // Source corners: first four vertices of the simplified contour
            // (any missing entries stay at the default (0,0)).
            for (int j = 0; j < approx.Size && j < 4; ++j)
            {
                points2[j] = approx[j];
            }

            // BUG FIX: the original comparator, (a, b) => (int)(a.Y - b.Y),
            // truncates sub-pixel differences to 0 and is non-transitive;
            // Array.Sort requires a consistent comparer and may throw
            // ArgumentException on an inconsistent one. Compare the floats directly.
            Array.Sort(points2, (a, b) => a.Y.CompareTo(b.Y));

            // Order the two top points right-to-left and the two bottom points
            // left-to-right to match the destination corner layout.
            if (points2[0].X < points2[1].X)
            {
                System.Drawing.PointF tmp = points2[0];
                points2[0] = points2[1];
                points2[1] = tmp;
            }
            if (points2[2].X > points2[3].X)
            {
                System.Drawing.PointF tmp = points2[2];
                points2[2] = points2[3];
                points2[3] = tmp;
            }

            // Warp the blurred image onto the upright card canvas; the warp Mat
            // is handed to Card, which owns it from here on.
            Mat warp = new Mat();
            using (Mat transform = CvInvoke.GetPerspectiveTransform(points2, points))
            {
                CvInvoke.WarpPerspective(blur, warp, transform, new System.Drawing.Size(800, 800));
            }
            library.Add(new Card(warp, color, cardValues[numCards - 1]));
            foundCards.Add(rect);
        }
    }
}