/// <summary>
/// Gets contours inside a point
/// </summary>
/// <param name="contours">All contours found in the image.</param>
/// <param name="hierarchy">Contour hierarchy from FindContours; column EmguContour.HierarchyParent holds each contour's parent index.</param>
/// <param name="location">Point to test against each contour.</param>
/// <param name="includeLimitingArea">If true it will include all limiting area, otherwise only outer contour will be returned</param>
/// <returns>The first contour containing the point (scanning from the last contour backwards), optionally followed by its child contours.</returns>
public static VectorOfVectorOfPoint GetContoursInside(VectorOfVectorOfPoint contours, int[,] hierarchy, Point location, bool includeLimitingArea = true)
{
    var vector = new VectorOfVectorOfPoint();
    var vectorSize = contours.Size;
    // Scan backwards so the innermost matching contour is found first.
    for (var i = vectorSize - 1; i >= 0; i--)
    {
        // Negative test result => point lies outside this contour.
        if (CvInvoke.PointPolygonTest(contours[i], location, false) < 0)
        {
            continue;
        }
        vector.Push(contours[i]);
        if (!includeLimitingArea)
        {
            break;
        }
        // Collect contours whose hierarchy parent is i.
        // NOTE(review): only indices after i are scanned — assumes children always
        // follow their parent in the contour list; confirm with the FindContours mode used.
        for (int n = i + 1; n < vectorSize; n++)
        {
            if (hierarchy[n, EmguContour.HierarchyParent] != i)
            {
                continue;
            }
            vector.Push(contours[n]);
        }
        break;
    }
    return(vector);
}
/// <summary>
/// Searches for the first pair of contours whose area ratio is close to 1:3
/// (|area[i] / area[j] - 0.33| &lt; 0.1) and returns both, contour j first.
/// Returns an empty vector when no such pair exists.
/// </summary>
/// <param name="source">Contours to examine.</param>
/// <returns>A vector holding the matching pair, or an empty vector.</returns>
public static VectorOfVectorOfPoint FilterAllContours(VectorOfVectorOfPoint source)
{
    VectorOfVectorOfPoint result = new VectorOfVectorOfPoint();
    VectorOfVectorOfPoint contours = source;
    int count = contours.Size;

    // Pre-compute areas once; ContourArea is comparatively expensive.
    double[] areas = new double[count];
    for (int i = 0; i < count; i++)
    {
        areas[i] = CvInvoke.ContourArea(contours[i]);
    }

    bool ready = false;
    for (int i = 0; i < count && !ready; i++)
    {
        // NOTE: j starts at 1 (index 0 is never used as the denominator contour);
        // preserved from the original implementation — confirm whether intentional.
        for (int j = 1; j < count && !ready; j++)
        {
            // Guard against division by zero for degenerate (zero-area) contours.
            if (areas[j] == 0)
            {
                continue;
            }
            if (Math.Abs(areas[i] / areas[j] - 0.33) < 0.1)
            {
                result.Push(contours[j]);
                result.Push(contours[i]);
                ready = true;
            }
        }
    }
    return result;
}
/// <summary>
/// Detects pupil candidate contours in an eye image: binarizes at BinaryValue,
/// finds external contours and keeps those with an area in (5, 50).
/// </summary>
/// <param name="eye">Source eye image.</param>
/// <param name="BinaryValue">Binary threshold applied before contour detection.</param>
/// <param name="MainFace">Optional face model; when supplied, receives the thresholded eye image.</param>
/// <returns>Contours whose area lies in the pupil-sized range (5, 50).</returns>
public static VectorOfVectorOfPoint Detect(IImage eye, int BinaryValue = 140, FaceParams MainFace = null)
{
    Image<Gray, Byte> grayEye = new Image<Gray, Byte>(eye.Bitmap);
    Image<Gray, Byte> _eye = grayEye.ThresholdBinary(new Gray(BinaryValue), new Gray(255));
    // ThresholdBinary returns a new image; dispose the intermediate to avoid a leak.
    grayEye.Dispose();
    VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint();
    // Hierarchy output is unused; dispose it deterministically.
    using (Mat hierarchy = new Mat())
    {
        CvInvoke.FindContours(_eye, contours, hierarchy, RetrType.External, ChainApproxMethod.ChainApproxSimple);
    }
    VectorOfVectorOfPoint pupilAreas = new VectorOfVectorOfPoint();
    if (MainFace != null)
    {
        // _eye must stay alive: it is handed to the face model.
        MainFace.RightEyeImg = _eye;
    }
    for (int i = 0; i < contours.Size; i++)
    {
        double area = CvInvoke.ContourArea(contours[i]);
        // Pupil-sized blobs only: large enough to be real, small enough to exclude the iris.
        if (area < 50 && area > 5)
        {
            pupilAreas.Push(contours[i]);
        }
    }
    return pupilAreas;
}
/// <summary>
/// Finds the largest contour (area &gt; 100) in a binary image, polygon-approximates
/// every sufficiently large contour along the way, and returns the biggest one
/// wrapped in a single-element vector.
/// </summary>
/// <param name="image">Binary input image for FindContours.</param>
/// <returns>A vector containing the largest approximated contour (empty contour if none found).</returns>
private static VectorOfVectorOfPoint FindBiggestContour(Mat image)
{
    var biggest = new VectorOfPoint();
    double max_area = 0;
    var biggestContour = new VectorOfVectorOfPoint();
    // Dispose the working containers deterministically (the original leaked both).
    using (var contours = new VectorOfVectorOfPoint())
    using (var hierarchy = new Mat())
    {
        CvInvoke.FindContours(image, contours, hierarchy, RetrType.Tree, ChainApproxMethod.ChainApproxSimple);
        for (int i = 0; i < contours.Size; ++i)
        {
            var area = CvInvoke.ContourArea(contours[i]);
            if (area > 100)
            {
                var peri = CvInvoke.ArcLength(contours[i], true);
                CvInvoke.ApproxPolyDP(contours[i], contours[i], 0.02 * peri, true); // Approximate to ideal lines
                if (area > max_area)
                {
                    biggest = contours[i];
                    max_area = area;
                }
            }
        }
        // Push copies the contour data, so disposing `contours` afterwards is safe.
        biggestContour.Push(biggest);
    }
    return biggestContour;
}
/// <summary>
/// This function will take a segmented grayscale image and the likeliest candidates for hands.
/// They are chosen as the largest contours with an area above the pixel threshold.
/// </summary>
/// <param name="inputImage">Already segmented grayscale image.</param>
/// <param name="pixelThreshold">Number of pixels required to be counted as a hand.</param>
/// <param name="numberOfContours">The n largest contours which will be picked from the list.</param>
/// <returns>Vector of contours, largest first.</returns>
public static VectorOfVectorOfPoint LargestContours(Image<Gray, byte> inputImage, int pixelThreshold = PixelThreshold, int numberOfContours = NumberOfContours)
{
    VectorOfVectorOfPoint sortedContours = new VectorOfVectorOfPoint();
    // using-blocks guarantee disposal even when an exception is thrown
    // (the original's explicit Dispose calls were skipped on exceptions).
    using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
    using (Mat hierarchyMat = new Mat())
    {
        CvInvoke.FindContours(inputImage, contours, hierarchyMat, RetrType.Tree, ChainApproxMethod.ChainApproxNone);
        if (contours.Size > 0)
        {
            Dictionary<VectorOfPoint, double> contourDict = new Dictionary<VectorOfPoint, double>();
            for (int i = 0; i < contours.Size; i++)
            {
                contourDict.Add(contours[i], CvInvoke.ContourArea(contours[i]));
            }
            // Largest-first, cut at the pixel threshold, then cap at numberOfContours.
            // Take(n) on a shorter sequence simply returns all of it, so the original
            // Count() pre-check (an extra full enumeration) is unnecessary.
            var selected = contourDict
                .OrderByDescending(pair => pair.Value)
                .TakeWhile(pair => pair.Value > pixelThreshold)
                .Take(numberOfContours);
            foreach (var pair in selected)
            {
                // Push copies the data, so disposing `contours` later is safe.
                sortedContours.Push(pair.Key);
            }
        }
    }
    return sortedContours;
}
/// <summary>
/// Draws the contour onto a fixed-size 184x140 canvas and returns every pixel inside
/// its bounding box whose four 4-neighbours are all black (contour pixels).
/// </summary>
/// <param name="Input">Contour to analyse.</param>
/// <returns>Points that have four black 4-neighbours.</returns>
public static List<Point> FindQuadroPoints(VectorOfPoint Input)
{
    VectorOfVectorOfPoint drawable = new VectorOfVectorOfPoint();
    Image<Gray, Byte> img = new Image<Gray, Byte>(184, 140, new Gray(255));
    drawable.Push(Input);
    CvInvoke.DrawContours(img, drawable, -1, new MCvScalar(0), 1, LineType.EightConnected);
    // Search for points with four (black) neighbours; the bounding box limits the scan.
    Rectangle BB = CvInvoke.BoundingRectangle(Input);
    List<Point> quadro = new List<Point>();
    // BUGFIX: clamp the scan to the image interior so the +/-1 neighbour lookups
    // cannot index outside the 184x140 image when the contour touches a border.
    int yStart = Math.Max(BB.Y, 1);
    int yEnd = Math.Min(BB.Y + BB.Height, img.Height - 1);
    int xStart = Math.Max(BB.X, 1);
    int xEnd = Math.Min(BB.X + BB.Width, img.Width - 1);
    for (int y = yStart; y < yEnd; y++)
    {
        for (int x = xStart; x < xEnd; x++)
        {
            if ((img[y - 1, x].Intensity == 0) && (img[y + 1, x].Intensity == 0) && (img[y, x - 1].Intensity == 0) && (img[y, x + 1].Intensity == 0))
            {
                quadro.Add(new Point(x, y));
            }
        }
    }
    return quadro;
}
/// <summary>
/// Keeps only contours that look like square markers: area within the configured
/// proportion of the frame, a roughly square min-area rectangle, and a contour area
/// close to that rectangle's area.
/// </summary>
/// <param name="_webcamFrame">Current frame; its dimensions normalise the area check.</param>
/// <returns>Contours that pass all three marker tests.</returns>
private VectorOfVectorOfPoint FilterContours(Mat _webcamFrame)
{
    var markers = new VectorOfVectorOfPoint();
    for (int idx = 0; idx < contours.Size; idx++)
    {
        // Test 1: contour area as a proportion of the whole frame.
        double area = CvInvoke.ContourArea(contours[idx], false);
        double proportion = area / (_webcamFrame.Height * _webcamFrame.Width);
        if (proportion < _minContourAreaProportion || proportion > _maxContourAreaProportion)
        {
            continue;
        }
        // Test 2: the minimal enclosing rectangle must be roughly square.
        RotatedRect box = CvInvoke.MinAreaRect(contours[idx]);
        double aspect = box.Size.Width / box.Size.Height;
        if (aspect < (1.0 - _squareShapeTolerance) || aspect > (1.0 + _squareShapeTolerance))
        {
            continue;
        }
        // Test 3: the contour must nearly fill its enclosing rectangle.
        double fill = area / (box.Size.Width * box.Size.Height);
        if (fill < (1.0 - _areaRectTolerance) || fill > (1.0 + _areaRectTolerance))
        {
            continue;
        }
        markers.Push(contours[idx]);
    }
    return markers;
}
/// <summary>
/// Image vectorization: thresholds the input image, extracts contours, filters them
/// with the configured approximation/line coefficients, and records the survivors in
/// contourListResult and contourList.
/// </summary>
public void MakeVectorization()
{
    // Lazily allocate the output image on the first call.
    if (editedImage == null)
    {
        editedImage = new Image <Bgr, byte>(inputImage.Size);
    }
    // Gaussian blur then inverse binary threshold; Settings.Sensitivity sets the cut-off.
    Image <Gray, byte> grayImage = inputImage.SmoothGaussian(5).Convert <Gray, byte>().ThresholdBinaryInv(new Gray(255 - Settings.Sensitivity), new Gray(255));
    Mat hierarchy = new Mat();
    VectorOfVectorOfPoint currentContourList = new VectorOfVectorOfPoint();
    CvInvoke.FindContours(grayImage, currentContourList, hierarchy, Emgu.CV.CvEnum.RetrType.Tree, Emgu.CV.CvEnum.ChainApproxMethod.ChainApproxSimple);
    for (int i = 0; i < currentContourList.Size; i++)
    {
        double perimeter = CvInvoke.ArcLength(currentContourList[i], true);
        double area = CvInvoke.ContourArea(currentContourList[i]);
        var approximation = MakeApproximation(currentContourList[i]);
        // Reject contours whose approximated point count is too large relative to their area.
        if (approximation.Count * Settings.ApproximationCoefficient > area && Settings.ApproximationCoefficientUsed)
        {
            currentContourList[i].Clear();
            continue;
        }
        // Reject thin, line-like contours (perimeter large relative to area).
        if (perimeter * Settings.LineCoefficient > area && Settings.LineCoefficientUsed)
        {
            currentContourList[i].Clear();
            continue;
        }
        // Sample the contour's colour at its first approximated point.
        Bgr contourColor = inputImage[approximation.First().Y, approximation.First().X];
        contourListResult.Add(new Contour("Contour " + (i + 1), approximation, "текст легенды", contourColor));
        contourList.Push(currentContourList[i]);
    }
}
/// <summary>
/// Finds external contours in a binary image and keeps those whose area is at least
/// <paramref name="thresholdarea"/>.
/// </summary>
/// <param name="img">Binary grayscale input image.</param>
/// <param name="thresholdarea">Minimum contour area to keep.</param>
/// <returns>The filtered contours.</returns>
private VectorOfVectorOfPoint CalculateContours(Image<Gray, byte> img, double thresholdarea = 1000)
{
    // The original wrapped everything in try/catch and rethrew `new Exception(ex.Message)`,
    // which destroyed both the stack trace and the exception type. Letting exceptions
    // propagate unchanged is strictly more informative for callers.
    VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint();
    // Hierarchy output is unused; dispose it deterministically (the original leaked it).
    using (Mat hierarchy = new Mat())
    {
        CvInvoke.FindContours(img, contours, hierarchy, Emgu.CV.CvEnum.RetrType.External, Emgu.CV.CvEnum.ChainApproxMethod.ChainApproxSimple);
    }
    VectorOfVectorOfPoint filteredContours = new VectorOfVectorOfPoint();
    for (int i = 0; i < contours.Size; i++)
    {
        var area = CvInvoke.ContourArea(contours[i]);
        if (area >= thresholdarea)
        {
            filteredContours.Push(contours[i]);
        }
    }
    return filteredContours;
}
/// <summary>
/// Classify contours by size and variance of intensities.
/// Contours larger than <paramref name="nonRelevantPixelSize"/> whose intensity
/// variance is below the 85th percentile are pushed into <paramref name="contoursRelevant"/>.
/// </summary>
/// <param name="uMatChannel">original image</param>
/// <param name="contours">all contours</param>
/// <param name="contoursRelevant">relevant contours</param>
/// <param name="nonRelevantPixelSize">non relevant contour pixel size</param>
private void ClassifyContours(UMat uMatChannel, ref VectorOfVectorOfPoint contours, ref VectorOfVectorOfPoint contoursRelevant, int nonRelevantPixelSize)
{
    // Map contour index -> intensity variance, for contours above the size threshold.
    Dictionary<int, double> variances = new Dictionary<int, double>();
    for (int i = 0; i < contours.Size; i++)
    {
        double area = CvInvoke.ContourArea(contours[i]);
        if (area > nonRelevantPixelSize)
        {
            variances.Add(i, CalculateVariance(uMatChannel, contours[i]));
        }
    }
    // set percentile to 1.0 to account all contours
    double variancePercentile = GetPercentile(variances.Values.ToArray(), 0.85);
    // foreach replaces the original for-loop with ElementAt(i), which re-enumerated
    // the dictionary from the start on each step — accidental O(n^2).
    foreach (KeyValuePair<int, double> entry in variances)
    {
        if (entry.Value < variancePercentile)
        {
            contoursRelevant.Push(contours[entry.Key]);
        }
    }
}
/// <summary>
/// Filters the image by an HSV range and returns the bounding rectangles of all
/// resulting contours that have more than 50 points.
/// </summary>
/// <param name="original">Source BGR image.</param>
/// <param name="lower">Lower HSV bound.</param>
/// <param name="higher">Upper HSV bound.</param>
/// <returns>Bounding rectangles of the sufficiently large contours.</returns>
public static List<Rectangle> getRectsByColorHsv(Image<Bgr, byte> original, Hsv lower, Hsv higher)
{
    List<Rectangle> rectangles = new List<Rectangle>();
    var filtered = ColorFilterer.filterByHsv(original, lower, higher);
    // The original also accumulated contours into a second VectorOfVectorOfPoint that
    // was never read or returned — dead work and a leak; removed.
    using (VectorOfVectorOfPoint contoursDetected = new VectorOfVectorOfPoint())
    {
        CvInvoke.FindContours(filtered, contoursDetected, null, Emgu.CV.CvEnum.RetrType.List, Emgu.CV.CvEnum.ChainApproxMethod.ChainApproxSimple);
        int count = contoursDetected.Size;
        for (int i = 0; i < count; i++)
        {
            using (VectorOfPoint currContour = contoursDetected[i])
            {
                // Ignore small/noise contours.
                if (currContour.Size > 50)
                {
                    rectangles.Add(CvInvoke.BoundingRectangle(currContour));
                }
            }
        }
    }
    return rectangles;
}
/// <summary>
/// Builds a NeuronBodyMask (bounding box, filled 184x140 mask image, centroid)
/// for each input contour.
/// </summary>
/// <param name="input">Neuron body contours.</param>
/// <returns>One mask per input contour, in the same order.</returns>
public static List<NeuronBodyMask> GenerateNeuronBodyMasks(List<VectorOfPoint> input)
{
    // The original pre-allocated throwaway instances (a 1x1 image, empty vectors)
    // before the loop and leaked a wrapper vector per iteration; all removed.
    List<NeuronBodyMask> result = new List<NeuronBodyMask>();
    for (int i = 0; i < input.Count; i++)
    {
        Rectangle boundingBox = CvInvoke.BoundingRectangle(input[i]);
        // Fixed 184x140 canvas, matching the rest of this pipeline.
        Image<Gray, Byte> maskImg = new Image<Gray, byte>(184, 140, new Gray(0));
        using (VectorOfVectorOfPoint contourWrapper = new VectorOfVectorOfPoint())
        {
            contourWrapper.Push(input[i]);
            // Thickness -1 => fill the contour.
            CvInvoke.DrawContours(maskImg, contourWrapper, -1, new MCvScalar(255), -1, LineType.EightConnected);
        }
        MCvMoments moments = CvInvoke.Moments(input[i]);
        // NOTE(review): M00 == 0 for a degenerate contour would make this division
        // invalid — confirm callers never pass empty/degenerate contours.
        Point centroid = new Point((int)(moments.M10 / moments.M00), (int)(moments.M01 / moments.M00));
        result.Add(new NeuronBodyMask(boundingBox, maskImg, centroid));
    }
    return result;
}
/// <summary>
/// Finds contours that qualify as water regions: area above AreaThreshold and a
/// form factor (4*pi*A/P^2, which is 1.0 for a perfect circle) above FormfactorThreshold.
/// Logs form factor and elongation for each accepted contour.
/// </summary>
/// <param name="input">Binary input image.</param>
/// <returns>The accepted contours.</returns>
private VectorOfVectorOfPoint FindWaterContours(IInputOutputArray input)
{
    VectorOfVectorOfPoint waterContours = new VectorOfVectorOfPoint();
    using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
    using (Mat hierachy = new Mat())
    {
        FindContours(input, contours, hierachy, RetrType.Tree, ChainApproxMethod.ChainApproxNone);
        int total = contours.Size;
        for (int idx = 0; idx < total; idx++)
        {
            double area = ContourArea(contours[idx], false);
            double perimeter = ArcLength(contours[idx], true);
            double formfactor = 4 * Math.PI * area / (perimeter * perimeter);
            bool accepted = area > AreaThreshold && formfactor > FormfactorThreshold;
            if (!accepted)
            {
                continue;
            }
            // Elongation from central moments: (x + sqrt(y)) / (x - sqrt(y)).
            var moments = Moments(contours[idx], true);
            double x = moments.M20 + moments.M02;
            double y = 4 * Math.Pow(moments.M11, 2) + Math.Pow((moments.M20 - moments.M02), 2);
            double elongation = (x + Math.Pow(y, 0.5)) / (x - Math.Pow(y, 0.5));
            Console.WriteLine($"formfactor: {formfactor} elongation: {elongation}");
            waterContours.Push(contours[idx]);
        }
    }
    return waterContours;
}
/// <summary>
/// Webcam frame handler: detects frontal faces, crops the frame to the first face,
/// then adaptively thresholds the grayscale frame and draws contours whose area lies
/// within (contourSizeMin, contourSizeMax).
/// </summary>
public void HandleWebcamQueryFrame(object sender, EventArgs e)
{
    if (webcam.IsOpened)
    {
        webcam.Retrieve(image);
    }
    if (image.IsEmpty)
    {
        return;
    }
    imageGray = image.Clone();
    CvInvoke.CvtColor(image, imageGray, ColorConversion.Bgr2Gray);
    if (imageGray.IsEmpty)
    {
        return;
    }
    frontFaces = frontFaceCascadeClassifier.DetectMultiScale(image: imageGray, scaleFactor: 1.1, minNeighbors: 5, minSize: new Size(MIN_FACE_SIZE, MIN_FACE_SIZE), maxSize: new Size(MAX_FACE_SIZE, MAX_FACE_SIZE));
    Debug.Log(frontFaces.Length.ToString());
    for (int i = 0; i < frontFaces.Length; i++)
    {
        // NOTE(review): thickness 0 — confirm a visible rectangle is actually intended here.
        CvInvoke.Rectangle(image, frontFaces[i], new MCvScalar(0, 180, 0), 0);
        Debug.Log("i: " + i.ToString());
    }
    // New matrix focused on the first detected face.
    if (frontFaces.Length > 0)
    {
        image = new Mat(image, frontFaces[0]);
    }
    DisplayFrame(image);
    // Adaptive thresholding before contour extraction.
    Mat hierarchy = new Mat();
    CvInvoke.AdaptiveThreshold(imageGray, imageGray, maxValue, AdaptiveThresholdType.MeanC, ThresholdType.Binary, blockSize, diviser);
    CvInvoke.FindContours(imageGray, allContours, hierarchy, RetrType.List, ChainApproxMethod.ChainApproxNone);
    desiredContours.Clear();
    // Keep contours within the configured size band.
    for (int i = 0; i < allContours.Size; i++)
    {
        if (CvInvoke.ContourArea(allContours[i]) > contourSizeMin && CvInvoke.ContourArea(allContours[i]) < contourSizeMax)
        {
            desiredContours.Push(allContours[i]);
        }
    }
    CvInvoke.DrawContours(image, desiredContours, -1, new MCvScalar(200, 100, 200), 2);
    //RotatedRect rotatedRect;
    //rotatedRect = CvInvoke.MinAreaRect(biggestContour);
    //rotatedRect.GetVertices();
    CvInvoke.Imshow("Webcam view Normal", image);
    CvInvoke.Imshow("Webcam view Gray", imageGray);
}
/// <summary>
/// Shows details of the FOV selected in cbFOV: fills the ROI text boxes and, when an
/// image has been captured, overlays the pad contours belonging to that FOV
/// (translated into the FOV's gerber-space rectangle) onto the image at 50% opacity.
/// </summary>
private void ShowDetail()
{
    int id = -1;
    // Read the combo-box selection on the UI thread.
    this.Dispatcher.Invoke(() => { id = cbFOV.SelectedIndex; });
    System.Drawing.Rectangle ROI = mModel.Gerber.FOVs[id].ROI;
    this.Dispatcher.Invoke(() =>
    {
        txtROIX.Text = ROI.X.ToString();
        txtROIY.Text = ROI.Y.ToString();
        txtROIWidth.Text = ROI.Width.ToString();
        txtROIHeight.Text = ROI.Height.ToString();
    });
    if (mImage != null)
    {
        var modelFov = mModel.FOV;
        // FOV rectangle in gerber coordinates, centred on the anchor point.
        System.Drawing.Rectangle ROIGerber = new System.Drawing.Rectangle(
            mAnchorROIGerber[id].X - modelFov.Width / 2,
            mAnchorROIGerber[id].Y - modelFov.Height / 2,
            modelFov.Width, modelFov.Height);
        mImage.ROI = ROI;
        using (Image <Bgr, byte> imgGerberBgr = new Image <Bgr, byte>(ROIGerber.Size))
        using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
        {
            // Collect contours of all pads whose first FOV is the selected one.
            for (int i = 0; i < mModel.Gerber.PadItems.Count; i++)
            {
                PadItem item = mModel.Gerber.PadItems[i];
                if (item.FOVs.Count > 0)
                {
                    if (item.FOVs[0] == id)
                    {
                        // Translate pad contour points into the FOV-local coordinate frame.
                        System.Drawing.Point[] cntPointSub = new System.Drawing.Point[item.ContourAdjust.Length];
                        for (int j = 0; j < cntPointSub.Length; j++)
                        {
                            cntPointSub[j] = new System.Drawing.Point(item.ContourAdjust[j].X - ROIGerber.X, item.ContourAdjust[j].Y - ROIGerber.Y);
                        }
                        contours.Push(new VectorOfPoint(cntPointSub));
                    }
                }
            }
            // Fill pad contours, then blend 50/50 with the captured image.
            CvInvoke.DrawContours(imgGerberBgr, contours, -1, new MCvScalar(255), -1);
            CvInvoke.AddWeighted(imgGerberBgr, 0.5, mImage, 0.5, 1, imgGerberBgr);
            this.Dispatcher.Invoke(() =>
            {
                BitmapSource bms = Utils.Convertor.Bitmap2BitmapSource(imgGerberBgr.Bitmap);
                imb.Source = bms;
            });
        }
        GC.Collect();
        GC.WaitForPendingFinalizers();
    }
    else
    {
        mLog.Info(string.Format("Cant Capture image in FOV : {0}", id + 1));
    }
}
/// <summary>
/// Keeps the largest connected regions: finds external contours on a Canny edge map,
/// sorts them by area, and fills (erases) every contour after the Num largest ones.
/// </summary>
/// <param name="inMat">Input image (Mat).</param>
/// <param name="Num">Number of largest regions to keep.</param>
/// <returns>The image with all but the Num largest regions filled over.</returns>
public static Mat MaxRegion(Mat inMat, int Num)
{
    // The original allocated an image and immediately overwrote the reference; removed.
    Image<Bgr, byte> src = inMat.ToImage<Bgr, byte>();
    Image<Gray, byte> edges = new Image<Gray, byte>(src.Width, src.Height);
    Image<Gray, byte> hierarchy = new Image<Gray, byte>(src.Width, src.Height);
    CvInvoke.Canny(src, edges, 100, 60);
    VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint();
    // Find external contours.
    CvInvoke.FindContours(edges, contours, hierarchy, RetrType.External, ChainApproxMethod.ChainApproxNone);
    // Collect contours with their areas for sorting.
    ContourItem[] ContourArray = new ContourItem[contours.Size];
    for (int i = 0; i < contours.Size; i++)
    {
        ContourArray[i].vp = contours[i];
        ContourArray[i].area = CvInvoke.ContourArea(ContourArray[i].vp);
    }
    // Sort by area, descending. BUGFIX: the original hand-rolled bubble sort started
    // its outer index at 1 and could terminate with the array incompletely sorted.
    Array.Sort(ContourArray, (a, b) => b.area.CompareTo(a.area));
    if (ContourArray.Length == contours.Size)
    {
        Console.WriteLine("相等" + contours.Size);
    }
    // Keep the first Num contours; fill the remaining ones.
    for (int i = Num; i < ContourArray.Length; i++)
    {
        // Only fill contours with area > 0.
        if (ContourArray[i].area > 0)
        {
            VectorOfVectorOfPoint tempVectorOfPoint = new VectorOfVectorOfPoint();
            tempVectorOfPoint.Push(ContourArray[i].vp);
            CvInvoke.FillPoly(src, tempVectorOfPoint, new MCvScalar(255, 0, 255, 255));
        }
    }
    return src.Mat;
}
/// <summary>
/// Wraps a point array in a single-contour VectorOfVectorOfPoint.
/// </summary>
/// <param name="points">Points forming the single contour.</param>
/// <returns>A vector-of-vectors containing exactly one contour.</returns>
private VectorOfVectorOfPoint GetVVP(Point[] points)
{
    var contour = new VectorOfPoint(points);
    var wrapper = new VectorOfVectorOfPoint();
    wrapper.Push(contour);
    return wrapper;
}
/// <summary>
/// Builds a preview image for a puzzle photo: finds contours, keeps those larger than
/// a weighted average area, draws a numbered bounding rectangle around each piece,
/// and assumes a rows/columns grid from the rectangle positions.
/// </summary>
/// <param name="imgPath">Path of the image to load.</param>
/// <param name="val">Weight passed to CalculateAvreage when computing the area cut-off.</param>
/// <returns>The annotated bitmap and the number of detected pieces.</returns>
private Tuple <Bitmap, int> CreatePreviewImage(string imgPath, double val)
{
    var q1 = new Image <Bgr, byte>(imgPath);
    // Pre-process (blur/threshold/dilate/erode) and find contours; Item1 holds the contours.
    var w3 = ExtensionMethods.FindContours(q1.Copy().Convert <Gray, byte>().GaussBlur().AdaptiveThreshold().Dilate(8).Erode());
    var avg = ExtensionMethods.CalculateAvreage(w3.Item1, val);
    // Keep only contours above the average-area cut-off.
    var e4 = new VectorOfVectorOfPoint();
    for (var i = 0; i < w3.Item1.Size; i++)
    {
        if (CvInvoke.ContourArea(w3.Item1[i]) > avg)
        {
            e4.Push(w3.Item1[i]);
        }
    }
    var boundRect = new List <Rectangle>();
    for (var i = 0; i < e4.Size; i++)
    {
        boundRect.Add(CvInvoke.BoundingRectangle(e4[i]));
    }
    var puzzelCounter = 0;
    var avgX = new int[boundRect.Count];
    var avgY = new int[boundRect.Count];
    // Draw each rectangle and its 1-based index at the rectangle centre.
    foreach (var r in boundRect)
    {
        avgX[puzzelCounter] = r.X;
        avgY[puzzelCounter] = r.Y;
        puzzelCounter++;
        q1 = q1.Rectangle(r, new MCvScalar(250, 0, 250))
               .PutText(puzzelCounter.ToString(),
                        new Point(r.X + r.Width / 2, r.Y + r.Height / 2),
                        new MCvScalar(255, 0, 255),
                        FontFace.HersheySimplex,
                        10,
                        20);
    }
    // Estimate the puzzle grid (rows/columns) from the rectangle origins.
    var assumedConfiguration = ExtensionMethods.AssumePuzzleConfiguration(avgX, avgY);
    X_axis.Value = assumedConfiguration[1];
    Y_axis.Value = assumedConfiguration[0];
    return(new Tuple <Bitmap, int>(q1.ToBitmap(), puzzelCounter));
}
/// <summary>
/// Converts a list of point vectors into a single VectorOfVectorOfPoint, preserving order.
/// </summary>
/// <param name="input">Contours to copy.</param>
/// <returns>A vector-of-vectors containing every input contour.</returns>
public static VectorOfVectorOfPoint ListOfVOPtoVVOP(List<VectorOfPoint> input)
{
    VectorOfVectorOfPoint result = new VectorOfVectorOfPoint();
    for (int i = 0; i < input.Count; i++)
    {
        result.Push(input[i]);
    }
    return result;
}
/// <summary>
/// Processing pipeline: median-filters the frame, applies morphological closing,
/// keeps contours within the configured size band, finds the largest one, and
/// draws the contours plus the largest contour's centroid onto <paramref name="output"/>.
/// </summary>
/// <param name="m">Input binary frame.</param>
/// <param name="structure">Structuring element for the closing operation.</param>
/// <param name="output">Image the contours and centroid are drawn onto.</param>
/// <returns>The (x, y) centroid of the largest contour.</returns>
private Tuple<int, int> Traitement(Mat m, Mat structure, Mat output)
{
    // Median filter to suppress salt-and-pepper noise.
    Mat binaryMatFiltered = MedianFilter(m);
    // Morphological closing (erosion/dilation).
    Mat fermetureMat = Fermeture(binaryMatFiltered, structure);
    // Contour extraction.
    VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint();
    VectorOfVectorOfPoint desiredContours = new VectorOfVectorOfPoint();
    Mat hierarchy = new Mat();
    CvInvoke.FindContours(fermetureMat, contours, hierarchy, RetrType.List, ChainApproxMethod.ChainApproxNone);
    // Size filter; area is computed once per contour (the original computed it twice).
    for (int i = 0; i < contours.Size; i++)
    {
        double area = CvInvoke.ContourArea(contours[i]);
        if (area > contourSizeMin && area < contourSizeMax)
        {
            desiredContours.Push(contours[i]);
        }
    }
    // Largest remaining contour. (The original also tracked an index it never used.)
    VectorOfPoint biggest_contour = new VectorOfPoint();
    double biggest_contour_area = 0;
    for (int i = 0; i < desiredContours.Size; i++)
    {
        double area = CvInvoke.ContourArea(desiredContours[i]);
        if (area > biggest_contour_area)
        {
            biggest_contour = desiredContours[i];
            biggest_contour_area = area;
        }
    }
    // Centroid from moments. NOTE(review): if no contour passed the size filter,
    // M00 is 0 and the divisions are undefined — confirm callers guarantee at least
    // one valid contour per frame.
    var moments = CvInvoke.Moments(biggest_contour);
    int cx = (int)(moments.M10 / moments.M00);
    int cy = (int)(moments.M01 / moments.M00);
    Point centroid = new Point(cx, cy);
    CvInvoke.DrawContours(output, desiredContours, -1, new MCvScalar(150), 3);
    CvInvoke.Circle(output, centroid, 5, new MCvScalar(150), 3);
    return Tuple.Create(cx, cy);
}
/// <summary>
/// Polygon-approximates every contour with ApproxPolyDP, using 5% of each contour's
/// perimeter as the approximation tolerance.
/// </summary>
/// <param name="vectors">Contours to simplify.</param>
/// <returns>The simplified contours, in the same order.</returns>
public static VectorOfVectorOfPoint getApproxContour(VectorOfVectorOfPoint vectors)
{
    var approxContours = new VectorOfVectorOfPoint();
    int total = vectors.Size;
    for (int idx = 0; idx < total; idx++)
    {
        double epsilon = CvInvoke.ArcLength(vectors[idx], true) * 0.05;
        var simplified = new VectorOfPoint();
        CvInvoke.ApproxPolyDP(vectors[idx], simplified, epsilon, true);
        approxContours.Push(simplified);
    }
    return approxContours;
}
/// <summary>
/// Per-frame pipeline: GPU background subtraction (MOG2) + threshold, contour
/// extraction and size filtering, dilation to merge fragments, a second contour pass
/// on the merged mask, then blob tracking by contour centroids.
/// </summary>
private void ProcessFrame(object sender, EventArgs e)
{
    if (_capture != null && _capture.Ptr != IntPtr.Zero)
    {
        _capture.Retrieve(frame, 0);
        gpuFrame.Upload(frame);
        // MOG2 background subtraction + binary threshold, both on the GPU.
        cudaBgMOG2.Apply(gpuFrame, gpuSub);
        CudaInvoke.Threshold(gpuSub, gpuSub, 12, 255, Emgu.CV.CvEnum.ThresholdType.Binary);
        gpuSub.Download(outSub);
        CvInvoke.FindContours(outSub, contours, hiererachy, Emgu.CV.CvEnum.RetrType.List, Emgu.CV.CvEnum.ChainApproxMethod.ChainApproxNone);
        // Keep only contours large enough to be real motion.
        for (int i = 0; i < contours.Size; i++)
        {
            if (CvInvoke.ContourArea(contours[i]) > 50)
            {
                contoursGood.Push(contours[i]);
            }
        }
        grayImage = new Image <Gray, byte>(frame.Width, frame.Height, new Gray(0));
        grayImage.SetZero();
        // Draw filled contours and dilate so nearby fragments merge into single blobs.
        CvInvoke.DrawContours(grayImage, contoursGood, -1, new MCvScalar(255, 255, 255), -1);
        CvInvoke.Dilate(grayImage, grayImage, element, new Point(-1, -1), 6, Emgu.CV.CvEnum.BorderType.Constant, new MCvScalar(255, 255, 255));
        contoursGood.Clear();
        // Second contour pass on the merged mask; centroids feed the blob tracker.
        CvInvoke.FindContours(grayImage, contours, hiererachy, Emgu.CV.CvEnum.RetrType.List, Emgu.CV.CvEnum.ChainApproxMethod.ChainApproxNone);
        List <Point> points = new List <Point>();
        for (int i = 0; i < contours.Size; i++)
        {
            MCvMoments moments = CvInvoke.Moments(contours[i], false);
            // NOTE(review): M00 can be 0 for a degenerate contour, making this
            // centroid invalid — confirm the dilation step rules that out.
            Point WeightedCentroid = new Point((int)(moments.M10 / moments.M00), (int)(moments.M01 / moments.M00));
            points.Add(WeightedCentroid);
        }
        blobList.AssignToBlobs(points);
        blobList.Draw(frame);
        blobList.Draw(mask);
        blobList.Update();
        CvInvoke.DrawContours(frame, contours, -1, new MCvScalar(0, 0, 255));
        imageBox1.Image = frame;
        imageBox2.Image = mask;
        grayImage.Dispose();
        indexFrame++;
    }
}
/// <summary>
/// Finds convex quadrilaterals in originalMat whose mean colour matches the Color
/// property and whose corner angles are close to right angles, then draws them in red.
/// </summary>
public void FindRectangles()
{
    Image <Gray, byte> img = originalMat.ToImage <Bgr, byte>().Canny(127, 255);
    VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint();
    Mat mat = new Mat();
    CvInvoke.FindContours(img, contours, mat, RetrType.External, ChainApproxMethod.ChainApproxSimple);
    VectorOfPoint contour = new VectorOfPoint();
    VectorOfVectorOfPoint squares = new VectorOfVectorOfPoint();
    for (int i = 0; i < contours.Size; i++)
    {
        // Approximate with 2% of the perimeter; a convex 4-point result above
        // SearchSize is a quadrilateral candidate.
        CvInvoke.ApproxPolyDP(contours[i], contour, CvInvoke.ArcLength(contours[i], true) * 0.02, true);
        if (contour.Size == 4 && Math.Abs(CvInvoke.ContourArea(contour)) > SearchSize && CvInvoke.IsContourConvex(contour))
        {
            Rectangle rect = CvInvoke.BoundingRectangle(contour);
            // Shrink the sampling rectangle by 10% per side to avoid edge pixels.
            Rectangle maskRect = new Rectangle(rect.Location, rect.Size);
            Size inflateSize = new Size((int)(maskRect.Width * -0.1), (int)(maskRect.Height * -0.1));
            maskRect.Inflate(inflateSize);
            Mat shapeMat = new Mat(originalMat, maskRect);
            // Mean BGR of the patch, converted to an ARGB colour for exact comparison
            // against the Color property.
            int[] colorScalars = CvInvoke.Mean(shapeMat).ToArray().Select(x => (int)x).ToArray();
            Color color = Color.FromArgb(255, colorScalars[2], colorScalars[1], colorScalars[0]);
            if (color.ToArgb() == Color.ToArgb())
            {
                // Largest |cos| over the quad's corner angles; small => near-right angles.
                double maxCosine = 0.0;
                for (int j = 2; j < 5; j++)
                {
                    double cosine = Math.Abs(Angle(contour[j % 4], contour[j - 2], contour[j - 1]));
                    maxCosine = cosine > maxCosine ? cosine : maxCosine;
                }
                if (maxCosine < 0.3)
                {
                    squares.Push(contour);
                }
            }
        }
    }
    Image <Bgr, byte> imgOut = new Image <Bgr, byte>(originalImage);
    CvInvoke.DrawContours(imgOut, squares, -1, new MCvScalar(0, 0, 255));
    image.Image = new Bitmap(imgOut.Bitmap);
    imgOut.Dispose();
}
/// <summary>
/// Returns the contours recognised as rectangles whose area exceeds minArea.
/// </summary>
/// <param name="contours">Candidate contours.</param>
/// <param name="minArea">Minimum contour area (exclusive).</param>
/// <returns>The matching contours, in input order.</returns>
public static VectorOfVectorOfPoint getRectangles(VectorOfVectorOfPoint contours, int minArea = 0)
{
    var rectangleContours = new VectorOfVectorOfPoint();
    int total = contours.Size;
    for (int idx = 0; idx < total; idx++)
    {
        // Shape test first; area is only computed for rectangles (short-circuit).
        if (isRectangle(contours[idx].ToArray()) && CvInvoke.ContourArea(contours[idx], false) > minArea)
        {
            rectangleContours.Push(contours[idx]);
        }
    }
    return rectangleContours;
}
/// <summary>
/// Builds an image analysis result, copying the supplied contours into GetContours.
/// </summary>
/// <param name="fileName">Source file name.</param>
/// <param name="fSize">Source file size in bytes.</param>
/// <param name="listOfContours">Contours to copy into this result.</param>
/// <param name="brightness">Per-pixel brightness data.</param>
/// <param name="objectCount">Number of detected objects.</param>
public ImageResult(string fileName, long fSize, VectorOfVectorOfPoint listOfContours, int[][] brightness, int objectCount)
{
    FileName = fileName;
    f_FileSize = fSize;
    Pass = 1;
    f_ObjectCount = objectCount;
    Brightness = brightness;
    GetContours = new VectorOfVectorOfPoint();
    int total = listOfContours.Size;
    for (int idx = 0; idx < total; idx++)
    {
        GetContours.Push(listOfContours[idx]);
    }
}
/// <summary>
/// Returns the contours that have exactly three points and an area above minArea.
/// </summary>
/// <param name="contours">Candidate contours.</param>
/// <param name="minArea">Minimum contour area (exclusive).</param>
/// <returns>The matching triangle contours, in input order.</returns>
public static VectorOfVectorOfPoint getTriangles(VectorOfVectorOfPoint contours, int minArea = 0)
{
    var trianglesContours = new VectorOfVectorOfPoint();
    int total = contours.Size;
    for (int idx = 0; idx < total; idx++)
    {
        bool isTriangle = contours[idx].Size == 3 && CvInvoke.ContourArea(contours[idx], false) > minArea;
        if (isTriangle)
        {
            trianglesContours.Push(contours[idx]);
        }
    }
    return trianglesContours;
}
/// <summary>
/// Finds external contours of the test image, keeps those with area > 20, draws them,
/// and annotates each with its index and centroid coordinates.
/// </summary>
private void contours_Click(object sender, EventArgs e)
{
    Image <Bgr, byte> image_contour = new Image <Bgr, byte>(imagetest.Width, imagetest.Height, new Bgr(0, 0, 0));
    VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint();
    CvInvoke.FindContours(imagetest, contours, null, Emgu.CV.CvEnum.RetrType.External, Emgu.CV.CvEnum.ChainApproxMethod.ChainApproxNone);
    // Filter out near-zero-area contours, then draw the survivors.
    VectorOfVectorOfPoint use_contours = new VectorOfVectorOfPoint();
    for (int i = 0; i < contours.Size; i++)
    {
        // One connected contour.
        VectorOfPoint contour = contours[i];
        // Its area.
        double area = CvInvoke.ContourArea(contour);
        // Area filter.
        if (area > 20)
        {
            // Keep contours that passed the filter.
            use_contours.Push(contour);
        }
    }
    CvInvoke.DrawContours(image_contour, use_contours, -1, new MCvScalar(0, 255, 0), 1);
    int ksize = use_contours.Size;
    double[] m00 = new double[ksize];
    double[] m01 = new double[ksize];
    double[] m10 = new double[ksize];
    Point[] gravity = new Point[ksize]; // centroid of each contour
    MCvMoments[] moments = new MCvMoments[ksize];
    for (int i = 0; i < ksize; i++)
    {
        VectorOfPoint contour = use_contours[i];
        // Moments of the current contour.
        moments[i] = CvInvoke.Moments(contour, true);
        m00[i] = moments[i].M00;
        m01[i] = moments[i].M01;
        m10[i] = moments[i].M10;
        int x = Convert.ToInt32(m10[i] / m00[i]); // centroid of the current contour
        int y = Convert.ToInt32(m01[i] / m00[i]);
        gravity[i] = new Point(x, y);
        //image_contour.Draw(new CircleF(gravity[i], 2), new Bgr(0, 255, 0), 2);
        image_contour.Draw(i.ToString(), new Point(gravity[i].X - 10, gravity[i].Y + 30), Emgu.CV.CvEnum.FontFace.HersheyComplexSmall, 1, new Bgr(0, 0, 255));
        image_contour.Draw((gravity[i].X).ToString() + "," + (gravity[i].Y).ToString(), gravity[i], Emgu.CV.CvEnum.FontFace.HersheySimplex, 1, new Bgr(0, 0, 255));
    }
    imageBox1.Image = image_contour;
}
/// <summary>
/// Keeps only contours with more than four points.
/// </summary>
/// <param name="contours">Candidate contours.</param>
/// <returns>The contours with size &gt; 4, in input order.</returns>
public static VectorOfVectorOfPoint SortContours(VectorOfVectorOfPoint contours)
{
    VectorOfVectorOfPoint filtered = new VectorOfVectorOfPoint();
    int total = contours.Size;
    for (int idx = 0; idx < total; idx++)
    {
        if (contours[idx].Size > 4)
        {
            filtered.Push(contours[idx]);
        }
    }
    return filtered;
}
// Update is called once per frame.
// Grabs a video frame, finds 4-point contour candidates via adaptive threshold and
// polygon approximation, then warps each candidate quad to a 64x64 top-down view.
void Update()
{
    fluxVideo.Grab();
    Mat grey = new Mat();
    CvInvoke.CvtColor(image, grey, ColorConversion.Bgr2Gray);
    CvInvoke.AdaptiveThreshold(grey, grey, 255, AdaptiveThresholdType.GaussianC, ThresholdType.BinaryInv, 21, 11);
    CvInvoke.FindContours(grey, contours, m, Emgu.CV.CvEnum.RetrType.External, Emgu.CV.CvEnum.ChainApproxMethod.ChainApproxSimple);
    for (int i = 0; i < contours.Size; i++)
    {
        double perimeter = CvInvoke.ArcLength(contours[i], true);
        CvInvoke.ApproxPolyDP(contours[i], approx, 0.04 * perimeter, true);
        // Quadrilateral candidates with a reasonable area.
        if (approx.Size == 4)
        {
            if (CvInvoke.ContourArea(contours[i]) > 300)
            {
                var rect = CvInvoke.BoundingRectangle(approx);
                // NOTE(review): this condition is true for every rect whose height is
                // not exactly 0.95*width — almost certainly a bug. The intent was
                // probably a near-square band such as
                // (rect.Height > 0.95 * rect.Width && rect.Height < 1.05 * rect.Width);
                // confirm before changing.
                if (rect.Height > 0.95 * rect.Width || rect.Height < 0.95 * rect.Width)
                {
                    candidates.Push(approx);
                    CvInvoke.DrawContours(image, contours, i, new MCvScalar(0, 255, 0), 4);
                    CvInvoke.Rectangle(image, rect, new MCvScalar(255, 0, 0));
                }
            }
        }
    }
    // Warp each candidate quad onto a canonical 64x64 square.
    for (int i = 0; i < candidates.Size; i++)
    {
        System.Drawing.PointF[] pts = new System.Drawing.PointF[4];
        pts[0] = new System.Drawing.PointF(0, 0);
        pts[1] = new System.Drawing.PointF(64 - 1, 0);
        pts[2] = new System.Drawing.PointF(64 - 1, 64 - 1);
        pts[3] = new System.Drawing.PointF(0, 64 - 1);
        VectorOfPointF perfect = new VectorOfPointF(pts);
        // NOTE(review): assumes the candidate's corner order matches `perfect`'s
        // (top-left, top-right, bottom-right, bottom-left) — confirm, otherwise the
        // warp may be mirrored/rotated.
        System.Drawing.PointF[] sample_pts = new System.Drawing.PointF[4];
        for (int ii = 0; ii < 4; ii++)
        {
            sample_pts[ii] = new System.Drawing.PointF(candidates[i][ii].X, candidates[i][ii].Y);
        }
        VectorOfPointF sample = new VectorOfPointF(sample_pts);
        var tf = CvInvoke.GetPerspectiveTransform(sample, perfect);
        Mat warped = new Mat();
        CvInvoke.WarpPerspective(image, warped, tf, new System.Drawing.Size(64, 64));
        CvInvoke.Imshow("yo", warped);
    }
    CvInvoke.WaitKey(24);
}
/// <summary>
/// Detects circular contours by comparing each contour's area/perimeter ratios against
/// the ideal-circle constants, reports them in textBox1, and draws each accepted
/// circle onto the source image.
/// </summary>
private void button3_Click(object sender, EventArgs e)
{
    textBox1.Text = "";
    CircleF circle = new CircleF();
    VectorOfVectorOfPoint vvp = new VectorOfVectorOfPoint();        // all contours
    VectorOfVectorOfPoint cricle_vvp = new VectorOfVectorOfPoint(); // contours accepted as circles
    CvInvoke.FindContours(gray_scr, vvp, null, Emgu.CV.CvEnum.RetrType.List, Emgu.CV.CvEnum.ChainApproxMethod.ChainApproxNone);
    for (int i = 0; i < vvp.Size; i++) // iterate over every contour
    {
        double area = CvInvoke.ContourArea(vvp[i]);       // for a circle: PI*R^2
        double length = CvInvoke.ArcLength(vvp[i], true); // for a circle: 2*PI*R
        double r = (area / length) * 2;                   // radius estimate: area/perimeter = R/2
        double c = (area / (length * length));            // A/P^2 == 1/(4*PI) ~ 0.0796 for a circle
        if (c > 0.063 && c < 0.08)                        // circularity band
        {
            if (r < Convert.ToInt32(numericUpDown4.Value) && r > Convert.ToInt32(numericUpDown3.Value))
            {
                cricle_vvp.Push(vvp[i]); // keep the accepted circle contour
                circle = CvInvoke.MinEnclosingCircle(vvp[i]);
                textBox1.Text += "找到第" + i + "个圆,比例值:" + c + "半径为:" + r + "\r\n" + "圆心为:" + circle.Center.X + "," + circle.Center.Y + "," + "半径为:" + circle.Radius + "\r\n";
            }
        }
    }
    Mat result = new Mat(gray_scr.Size, Emgu.CV.CvEnum.DepthType.Cv8U, 3);
    result.SetTo(new MCvScalar(0, 0, 0));
    CvInvoke.DrawContours(result, vvp, -1, new MCvScalar(0, 255, 0));
    imageBox6.Image = result;
    if (cricle_vvp.Size == 0)
    {
        textBox1.Text = "没有找到合适的圆!";
    }
    else
    {
        for (int i = 0; i < cricle_vvp.Size; i++)
        {
            // BUGFIX: the original drew the same (last detected) circle on every
            // iteration; recompute the enclosing circle for each accepted contour.
            circle = CvInvoke.MinEnclosingCircle(cricle_vvp[i]);
            CvInvoke.Circle(src, new Point((int)circle.Center.X, (int)circle.Center.Y), (int)circle.Radius, new Bgr(0, 0, 255).MCvScalar, 3);
            CvInvoke.Circle(src, new Point((int)circle.Center.X, (int)circle.Center.Y), 3, new Bgr(0, 0, 255).MCvScalar, 3);
        }
        imageBox1.Image = src;
        imageBox1.Refresh();
    }
}