/// <summary>
/// Selects a contour by index and computes its features:
/// area, arc length, raw/central moments, centroid and Hu moments.
/// </summary>
private void NumListContours_ValueChanged(object sender, EventArgs e)
{
    // Draw the selected contour first.
    mArgs.FeatureType = ContourFeatureType.Contour;
    mArgs.ContourIndex = (int)NumListContours.Value;
    Params_Changed();

    // Then compute the features of the selected contour.
    VectorOfPoint cnt = mContours[mArgs.ContourIndex];
    TxtContourArea.Text = CvInvoke.ContourArea(cnt).ToString();
    TxtArcLength.Text = CvInvoke.ArcLength(cnt, true).ToString();

    MCvMoments moments = CvInvoke.Moments(cnt);
    // Format index "{02}" normalized to "{2}" (same placeholder, clearer).
    TxtMoments.Text = string.Format(
        "M00: {0}\r\nM01: {1}\r\nM02: {2}\r\nM03: {3}\r\nM10: {4}\r\nM11: {5}\r\nM12: {6}\r\nM20: {7}\r\nM21: {8}\r\nM30: {9}\r\n" +
        "Mu02: {10}\r\nMu03: {11}\r\nMu11: {12}\r\nMu12: {13}\r\nMu20: {14}\r\nMu21: {15}\r\nMu30: {16}",
        moments.M00, moments.M01, moments.M02, moments.M03, moments.M10, moments.M11, moments.M12,
        moments.M20, moments.M21, moments.M30,
        moments.Mu02, moments.Mu03, moments.Mu11, moments.Mu12, moments.Mu20, moments.Mu21, moments.Mu30);

    // Centroid = (M10/M00, M01/M00).
    TxtContourCenter.Text = string.Format("({0},{1})", moments.M10 / moments.M00, moments.M01 / moments.M00);

    // BUG FIX: the original appended to TxtHuMoments.Text without clearing it,
    // so values accumulated every time a new contour was selected.
    // Build the text fresh each time (StringBuilder avoids O(n^2) concatenation).
    double[] huMoments = moments.GetHuMoment();
    var sb = new System.Text.StringBuilder();
    for (int i = 0; i < huMoments.Length; i++)
    {
        sb.Append(huMoments[i]).Append("\r\n");
    }
    TxtHuMoments.Text = sb.ToString();
}
private Image <Bgr, byte> DetectAndDrawCircles(Image <Gray, byte> binaryImage, Image <Bgr, byte> imageFrame, MCvScalar color, Color filter)
{
    // Annotate a copy of the frame so the caller's image is left untouched.
    Image<Bgr, byte> annotated = imageFrame.Clone();

    // Extract the external contours of the binary mask.
    VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint();
    CvInvoke.FindContours(binaryImage, contours, null, Emgu.CV.CvEnum.RetrType.External, Emgu.CV.CvEnum.ChainApproxMethod.ChainApproxSimple);

    if (contours.Size > 0)
    {
        // Only the first contour is examined; its zeroth moment is its area.
        MCvMoments moment = CvInvoke.Moments(contours[0]);
        double area = moment.M00;
        if (area > 400)
        {
            // Centroid = (M10/area, M01/area).
            MCvPoint2D64f centroid = new MCvPoint2D64f(moment.M10 / area, moment.M01 / area);
            Point center = new Point((int)centroid.X, (int)centroid.Y);

            // Blue detections are additionally recorded for later use.
            if (filter == Color.Blue)
            {
                blueCircle.Add(center);
            }

            CvInvoke.Circle(annotated, center, 2, color, 3);
            CvInvoke.DrawContours(annotated, contours, 0, color, 4);
        }
    }

    return annotated;
}
private void calculPosBaguette()
{
    // Retrieve the position of the object in the player's hand via the
    // centroid of the biggest contour; coordinates are normalized to
    // camera space (from -1 to +1).
    if (biggestContours == null)
    {
        return;
    }

    MCvMoments moments = CvInvoke.Moments(biggestContours);
    float objectPosition_x = -1.0f + 2.0f * (float)(moments.M10 / moments.M00) / 640;
    float objectPosition_y = +1.0f - 2.0f * (float)(moments.M01 / moments.M00) / 480;

    // M00 == 0 (empty contour) yields NaN — skip the update in that case.
    if (float.IsNaN(objectPosition_x))
    {
        return;
    }

    Debug.Log(objectPosition_x);

    // Clamp the horizontal position to the playable range [-0.4, 0.47].
    float clamped = objectPosition_x < 0
        ? Math.Max(objectPosition_x, -0.4f)
        : Math.Min(objectPosition_x, 0.47f);

    // Note the sign flip: screen x maps to the negative world x axis.
    baguette.transform.position = new Vector3(-clamped, baguette.transform.position.y, baguette.transform.position.z);
}
/// <summary>
/// Eccentricity of a shape from its second-order central moments:
/// the ratio of the eigenvalues of the inertia matrix [[mu20, mu11], [mu11, mu02]].
/// </summary>
double getEccentricity(MCvMoments mu)
{
    // BUG FIX: the previous expression took Math.Sqrt(mu.m20 - mu.m02), which is
    // NaN whenever mu02 > mu20, and never squared the difference term at all.
    // This is the standard formulation (it was already present in the original
    // as a commented-out block).
    double bigSqrt = Math.Sqrt((mu.m20 - mu.m02) * (mu.m20 - mu.m02) + 4 * mu.m11 * mu.m11);
    return (mu.m20 + mu.m02 + bigSqrt) / (mu.m20 + mu.m02 - bigSqrt);
}
/// <summary> /// 获取机器人法兰中心移动坐标和旋转角度,camera=0代表右侧相机,camera=1代表前方向机 /// </summary> /// <param name="bitmap">产品图像</param> /// <param name="cameraID">相机编号</param> public ImageInfo GetProductParamters(Bitmap bitmap, int cameraID, int spongeH) { ImageInfo imageInfo = new ImageInfo(); List <VectorOfPoint> imageContours = GetContours(bitmap, cameraID, spongeH); VectorOfPoint productContour = imageContours.Max(); MCvMoments cvMoments = CvInvoke.Moments(productContour, false); int gravityX = Convert.ToInt32(cvMoments.M10 / cvMoments.M00); int gravityY = Convert.ToInt32(cvMoments.M01 / cvMoments.M00); Trace.WriteLine("用矩计算质心:X = " + gravityX.ToString() + "\tY = " + gravityY.ToString()); if (productContour != null) { var minRect = CvInvoke.MinAreaRect(productContour); //最小外接矩形 PointF[] pt = CvInvoke.BoxPoints(minRect); //最小外接矩形四个角点 PointF po = minRect.Center; //最小外接矩形中心 Trace.WriteLine("当前外接矩形中心:" + po.ToString()); imageInfo.CenterOfImg = po; //长轴,短轴,倾角计算: //AxisLong = Math.Sqrt(Math.Pow(pt[1].X - pt[0].X, 2) + Math.Pow(pt[1].Y - pt[0].Y, 2)); imageInfo.AxisLong = minRect.Size.Width > minRect.Size.Height ? minRect.Size.Width : minRect.Size.Height; //AxisShort = Math.Sqrt(Math.Pow(pt[2].X - pt[1].X, 2) + Math.Pow(pt[2].Y - pt[1].Y, 2)); imageInfo.AxisShort = minRect.Size.Height <= minRect.Size.Width ? minRect.Size.Height : minRect.Size.Width;; imageInfo.ImageCorner = pt; imageInfo.RotatedAngle = Math.Abs(minRect.Angle) > 45?minRect.Angle + 90:minRect.Angle; Matrix <double> imgCenter = new Matrix <double>(3, 1) {
private void button3_Click(object sender, EventArgs e)
{
    // Template-matching demo: locate occurrences of `temp` inside `scr` by
    // iteratively lowering a threshold on the normalized score map until at
    // most one connected region survives, then draw a rectangle per region.
    // Score map is (W-w+1) x (H-h+1), single channel.
    Mat result = new Mat(new Size(scr.Width - temp.Width + 1, scr.Height - temp.Height + 1), Emgu.CV.CvEnum.DepthType.Cv8U, 1);
    // NOTE(review): with SqdiffNormed, SMALLER scores mean better matches (the
    // original comment claimed larger-is-better, which holds for Ccoeff methods).
    CvInvoke.MatchTemplate(scr, temp, result, Emgu.CV.CvEnum.TemplateMatchingType.SqdiffNormed);
    // Rescale the scores into [0, 255].
    CvInvoke.Normalize(result, result, 255, 0, Emgu.CV.CvEnum.NormType.MinMax);
    // Convert the score map to 8-bit.
    result = result.ToImage <Gray, byte>().Mat;
    // Contours of the thresholded regions.
    VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint();
    int threshold = 180; // initial threshold
    Mat data = new Mat(); // thresholded score map
    while (true)
    {
        // BinaryInv keeps pixels BELOW the threshold (= good matches under Sqdiff).
        CvInvoke.Threshold(result, data, threshold, 255, Emgu.CV.CvEnum.ThresholdType.BinaryInv);
        CvInvoke.FindContours(data, contours, null, Emgu.CV.CvEnum.RetrType.External, Emgu.CV.CvEnum.ChainApproxMethod.ChainApproxSimple);
        // NOTE(review): the original comment said "<= 10" but the code stops
        // when at most ONE region remains.
        if (contours.Size <= 1)
        {
            break;
        }
        threshold -= 2; // tighten the threshold and retry
    }
    // For each surviving region...
    for (int i = 0; i < contours.Size; i++)
    {
        VectorOfPoint contour = contours[i];
        // Region moments; centroid = (M10/M00, M01/M00) — this is the match's
        // top-left corner in score-map coordinates.
        MCvMoments moment = CvInvoke.Moments(contour);
        Point p = new Point((int)(moment.M10 / moment.M00), (int)(moment.M01 / moment.M00));
        // Draw the matched area onto the source image.
        CvInvoke.Rectangle(scr, new Rectangle(p, temp.Size), new MCvScalar(0, 0, 255), 4);
    }
    imageBox1.Image = scr;
}
void Moments(Image <Gray, byte> img, ref Double[] MomentSp, ref Double[] MomentCen, ref Double[] MomentNor)
{
    // Fills the three output arrays with the spatial, central and normalized
    // central moments (all orders with xOrder + yOrder <= 3) of the first
    // contour found in a binarized copy of `img` (threshold at gray 60).
    Image <Gray, byte> imgA = new Image <Gray, byte>(img.Size);
    imgA = img.ThresholdBinary(new Gray(60), new Gray(255));
    // Legacy Emgu 2.x contour API; FindContours returns the head of the list.
    Contour <Point> contourA = imgA.FindContours();
    MCvMoments momentsA = contourA.GetMoments();
    for (int xOrder = 0; xOrder <= 3; xOrder++)
    {
        for (int yOrder = 0; yOrder <= 3; yOrder++)
        {
            if (xOrder + yOrder <= 3)
            {
                // NOTE(review): the index 3*xOrder + yOrder collides for
                // (xOrder=0, yOrder=3) and (xOrder=1, yOrder=0) — both map to
                // slot 3, so moment (0,3) is overwritten by (1,0). Confirm
                // whether callers rely on this layout before changing it.
                MomentSp[3 * xOrder + yOrder] = momentsA.GetSpatialMoment(xOrder, yOrder);
                MomentCen[3 * xOrder + yOrder] = momentsA.GetCentralMoment(xOrder, yOrder);
                MomentNor[3 * xOrder + yOrder] = momentsA.GetNormalizedCentralMoment(xOrder, yOrder);
            }
        }
    }
    /* for (int m = 0; m < 12; m++)
     * {
     * richTextBox4.AppendText("SP:"+MomentSp[m]+" CEN:"+MomentCen[m]+" Nor: "+MomentNor[m]+"\n");
     * }*/
}
void Hu(Image <Gray, byte> img, ref Double[] MomentHu, ref Double[] MomentR)
{
    // Computes the first six Hu invariants of the first contour of the
    // binarized image (threshold at gray 60), then derives the ten R-moment
    // values from them via Rmoment.
    double r1 = 0, r2 = 0, r3 = 0, r4 = 0, r5 = 0, r6 = 0, r7 = 0, r8 = 0, r9 = 0, r10 = 0;

    Image<Gray, byte> binary = new Image<Gray, byte>(img.Size);
    binary = img.ThresholdBinary(new Gray(60), new Gray(255));
    Contour<Point> firstContour = binary.FindContours();
    MCvHuMoments hu = firstContour.GetMoments().GetHuMoment();

    // First six Hu invariants.
    MomentHu[0] = hu.hu1;
    MomentHu[1] = hu.hu2;
    MomentHu[2] = hu.hu3;
    MomentHu[3] = hu.hu4;
    MomentHu[4] = hu.hu5;
    MomentHu[5] = hu.hu6;

    Rmoment(hu.hu1, hu.hu2, hu.hu3, hu.hu4, hu.hu5, hu.hu6,
            ref r1, ref r2, ref r3, ref r4, ref r5, ref r6, ref r7, ref r8, ref r9, ref r10);

    // Derived R moments.
    MomentR[0] = r1; MomentR[1] = r2; MomentR[2] = r3; MomentR[3] = r4; MomentR[4] = r5;
    MomentR[5] = r6; MomentR[6] = r7; MomentR[7] = r8; MomentR[8] = r9; MomentR[9] = r10;
}
public static List <NeuronBodyMask> GenerateNeuronBodyMasks(List <VectorOfPoint> input)
{
    // For every contour: render it filled into a fresh 184x140 binary mask,
    // compute its centroid from the image moments, and package the mask with
    // the contour's bounding rectangle into a NeuronBodyMask.
    List<NeuronBodyMask> result = new List<NeuronBodyMask>();

    foreach (VectorOfPoint contour in input)
    {
        Rectangle bounds = CvInvoke.BoundingRectangle(contour);

        // Fill the contour in white onto a black 184x140 canvas.
        Image<Gray, Byte> maskImg = new Image<Gray, byte>(184, 140, new Gray(0));
        VectorOfVectorOfPoint wrapper = new VectorOfVectorOfPoint();
        wrapper.Push(contour);
        CvInvoke.DrawContours(maskImg, wrapper, -1, new MCvScalar(255), -1, LineType.EightConnected);

        // Centroid = (M10/M00, M01/M00).
        MCvMoments moments = CvInvoke.Moments(contour);
        Point centroid = new Point((int)(moments.M10 / moments.M00), (int)(moments.M01 / moments.M00));
        //maskImg.Draw(new Point[] { centroid }, new Gray(100), 1);

        result.Add(new NeuronBodyMask(bounds, maskImg, centroid));
    }

    return result;
}
/// <summary>
/// Get the moments for this point sequence
/// </summary>
/// <returns>the moments for this point sequence</returns>
public MCvMoments GetMoments()
{
    // Legacy OpenCV 1.x interop: cvMoments fills the struct in place.
    // The final argument 0 means the sequence is NOT treated as a binary image.
    MCvMoments moment = new MCvMoments();
    CvInvoke.cvMoments(Ptr, ref moment, 0);
    return(moment);
}
static ContourProperties FishContour(Mat image_raw, Mat background)
{
    // Background-subtracts the raw frame, thresholds the difference, and scans
    // the external contours for the first one whose bounding-box size (the
    // larger box side) lies in (25, 50) px — taken to be the fish. On success
    // returns its bounding-box centre plus moment-based centre of mass;
    // otherwise a default ContourProperties is returned and logged.
    bool fishcont_found = false;
    Size frsize = new Size(image_raw.Width, image_raw.Height);
    Mat image = new Mat(frsize, Emgu.CV.CvEnum.DepthType.Cv8U, 1);
    ContourProperties contprops = new ContourProperties();
    ThresholdType ttype = 0; // 0 == ThresholdType.Binary
    VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint();
    Mat hierarchy = new Mat();
    CvInvoke.AbsDiff(image_raw, background, image);
    // This should be 30 as the LB. Switched to 20 to see if i could pick up paramecia.
    CvInvoke.Threshold(image, image, 10, 255, ttype);
    // NOTE(review): the debug display of the thresholded image is ACTIVE here
    // (despite the original "uncomment if you want" note); remove the next four
    // lines for headless runs.
    String camerawindow = "Camera Window";
    CvInvoke.NamedWindow(camerawindow);
    CvInvoke.Imshow(camerawindow, image);
    CvInvoke.WaitKey(1);
    CvInvoke.FindContours(image, contours, hierarchy, RetrType.External, ChainApproxMethod.ChainApproxNone);
    int fish_contour_index = 0;
    Rectangle bounding_rect = new Rectangle();
    for (int ind = 0; ind < contours.Size; ind++)
    {
        bounding_rect = CvInvoke.BoundingRectangle(contours[ind]);
        // "height" is really max(box width, box height): orientation-agnostic.
        if (bounding_rect.Width > bounding_rect.Height)
        {
            contprops.height = bounding_rect.Width;
        }
        else
        {
            contprops.height = bounding_rect.Height;
        }
        // Size gate for a fish-like contour; take the first match.
        if (contprops.height < 50 && contprops.height > 25)
        {
            fish_contour_index = ind;
            fishcont_found = true;
            break;
        }
    }
    if (fishcont_found)
    {
        var contourCenter = new Point();
        var contourCOM = new Point();
        MCvMoments com = new MCvMoments();
        com = CvInvoke.Moments(contours[fish_contour_index]);
        // Centre of mass from the moments (M10/M00, M01/M00);
        // geometric centre from the bounding box.
        contourCOM.X = (int)(com.M10 / com.M00);
        contourCOM.Y = (int) (com.M01 / com.M00);
        contourCenter.X = (int)(bounding_rect.X + (float)bounding_rect.Width / (float)2);
        contourCenter.Y = (int)(bounding_rect.Y + (float)bounding_rect.Height / (float)2);
        contprops.center = contourCenter;
        contprops.com = contourCOM;
    }
    else
    {
        // No qualifying contour: log the (default) properties for diagnosis.
        Console.WriteLine(contprops.com);
        Console.WriteLine(contprops.height);
        Console.WriteLine("no contours");
    }
    return contprops;
}
/// <summary>
/// Computes the ball's centroid from the processed frame.
/// </summary>
/// <param name="img">Raw camera frame; ProcessFrame is expected to yield a mask of the ball.</param>
/// <returns>The centroid (M10/M00, M01/M00), or Point.Empty when the mask is empty.</returns>
public Point GetBallCoordinates(Mat img)
{
    MCvMoments moments = CvInvoke.Moments(ProcessFrame(img));
    double dM01 = moments.M01;
    double dM10 = moments.M10;
    double dArea = moments.M00;

    // ROBUSTNESS: an empty mask has zero area; the original divided by zero and
    // cast the resulting NaN/Inf to int, producing garbage coordinates.
    // Report "not found" deterministically as (0, 0) instead.
    if (dArea <= 0)
    {
        return Point.Empty;
    }

    int posX = (int)(dM10 / dArea);
    int posY = (int)(dM01 / dArea);
    return new Point(posX, posY);
}
void DrawLine(VectorOfPoint biggestContour, Image <Hsv, Byte> image)
{
    // Mark the contour's centroid by drawing a thick degenerate polyline
    // (both endpoints at the centroid), which renders as a dot.
    MCvMoments moments = CvInvoke.Moments(biggestContour);
    int centerX = (int)(moments.M10 / moments.M00);
    int centerY = (int)(moments.M01 / moments.M00);

    Hsv markerColor = new Hsv(179, 100, 50);
    Point centroidPoint = new Point(centerX, centerY);
    image.Draw(new[] { centroidPoint, centroidPoint }, markerColor, 10);
}
private void ProcessFrame(object sender, EventArgs e)
{
    // Per-frame pipeline: CUDA MOG2 background subtraction -> threshold ->
    // area-filtered contours -> dilated binary mask -> second contour pass
    // whose centroids feed the blob tracker; results are drawn into the
    // two image boxes.
    if (_capture != null && _capture.Ptr != IntPtr.Zero)
    {
        _capture.Retrieve(frame, 0);
        // Foreground mask computed on the GPU, then downloaded.
        gpuFrame.Upload(frame);
        cudaBgMOG2.Apply(gpuFrame, gpuSub);
        CudaInvoke.Threshold(gpuSub, gpuSub, 12, 255, Emgu.CV.CvEnum.ThresholdType.Binary);
        gpuSub.Download(outSub);
        CvInvoke.FindContours(outSub, contours, hiererachy, Emgu.CV.CvEnum.RetrType.List, Emgu.CV.CvEnum.ChainApproxMethod.ChainApproxNone);
        // Keep only contours with a meaningful area (> 50 px^2).
        for (int i = 0; i < contours.Size; i++)
        {
            if (CvInvoke.ContourArea(contours[i]) > 50)
            {
                contoursGood.Push(contours[i]);
            }
        }
        // Rasterize the surviving contours and dilate to merge nearby blobs.
        grayImage = new Image <Gray, byte>(frame.Width, frame.Height, new Gray(0));
        grayImage.SetZero();
        CvInvoke.DrawContours(grayImage, contoursGood, -1, new MCvScalar(255, 255, 255), -1);
        CvInvoke.Dilate(grayImage, grayImage, element, new Point(-1, -1), 6, Emgu.CV.CvEnum.BorderType.Constant, new MCvScalar(255, 255, 255));
        contoursGood.Clear();
        // Re-extract contours of the merged mask and collect their centroids.
        CvInvoke.FindContours(grayImage, contours, hiererachy, Emgu.CV.CvEnum.RetrType.List, Emgu.CV.CvEnum.ChainApproxMethod.ChainApproxNone);
        List <Point> points = new List <Point>();
        for (int i = 0; i < contours.Size; i++)
        {
            MCvMoments moments = CvInvoke.Moments(contours[i], false);
            // Centroid = (M10/M00, M01/M00).
            Point WeightedCentroid = new Point((int)(moments.M10 / moments.M00), (int)(moments.M01 / moments.M00));
            points.Add(WeightedCentroid);
        }
        // Feed the tracker and render the overlays.
        blobList.AssignToBlobs(points);
        blobList.Draw(frame);
        blobList.Draw(mask);
        blobList.Update();
        CvInvoke.DrawContours(frame, contours, -1, new MCvScalar(0, 0, 255));
        imageBox1.Image = frame;
        imageBox2.Image = mask;
        grayImage.Dispose();
        indexFrame++;
    }
}
public Form1()
{
    InitializeComponent();
    // Adaptive skin detector with no morphological post-processing.
    detector = new AdaptiveSkinDetector(1, AdaptiveSkinDetector.MorphingMethod.NONE);
    // HSV range used for skin segmentation.
    hsv_min = new Hsv(0, 45, 0);
    hsv_max = new Hsv(20, 255, 255);
    // YCrCb range used for skin segmentation.
    YCrCb_min = new Ycc(0, 131, 80);
    YCrCb_max = new Ycc(255, 185, 135);
    float flot = 15;
    // MOG2 background subtractor: 30-frame history, variance threshold 15,
    // shadow detection disabled.
    bgs = new BackgroundSubtractorMOG2(30, flot, false);
    mv = new MCvMoments();
}
private void detectarCentro()
{
    // Finds the centre of each sufficiently large contour in `imgout` and
    // marks it with a small rectangle drawn directly on the PictureBox.
    matrizImagen = new int[img.Height, img.Width];
    /*if (img!=null)
     * {
     * for(int i = 0;i<img.Height;i++)
     * {
     * for(int j=0;j<img.Width;j++)
     * {
     * if (imgout[i, j].Intensity == 0)
     * {
     * matrizImagen[i, j] = 0;
     * }
     *
     *
     * }
     * }
     * }*/
    Emgu.CV.Util.VectorOfVectorOfPoint contours = new Emgu.CV.Util.VectorOfVectorOfPoint();
    Mat mat = new Mat();
    CvInvoke.FindContours(imgout, contours, mat, Emgu.CV.CvEnum.RetrType.External, Emgu.CV.CvEnum.ChainApproxMethod.ChainApproxSimple);
    for (int x = 0; x < contours.Size; x++)
    {
        // Blank the whole output image so only the current contour remains.
        for (int i = 0; i < img.Height; i++)
        {
            for (int j = 0; j < img.Width; j++)
            {
                imgout[i, j] = new Gray(0);
            }
        }
        var area = CvInvoke.ContourArea(contours[x]);
        // Sensitivity threshold taken from the UI control.
        if (area > (int)numericSenCount.Value)
        {
            CvInvoke.DrawContours(imgout, contours, x, new MCvScalar(255, 255, 255));
            // NOTE(review): the moments are computed over the whole redrawn
            // image, not over contours[x] directly — equivalent only because
            // the image contains just this one contour at this point.
            MCvMoments moments = CvInvoke.Moments(imgout.Mat, false);
            Point centroide = new Point((int)(moments.M10 / moments.M00), (int)(moments.M01 / moments.M00));
            // Draw a marker at the centroid; papel/pluma are form-level
            // Graphics/Pen fields (CreateGraphics is transient — the marker
            // disappears on the next repaint).
            papel = pictureBox1.CreateGraphics();
            pluma.Width = 5;
            pluma.Color = Color.DarkRed;
            papel.DrawRectangle(pluma, (int)(moments.M10 / moments.M00), (int)(moments.M01 / moments.M00), 5, 5);
        }
    }
    detectarBorde();
    //pictureBox2.Image = imgout.Bitmap;
}
private void contours_Click(object sender, EventArgs e)
{
    // Finds external contours of the test image, keeps those with area > 20,
    // draws them in green, and annotates each with its index and centroid.
    Image <Bgr, byte> image_contour = new Image <Bgr, byte>(imagetest.Width, imagetest.Height, new Bgr(0, 0, 0));
    VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint();
    CvInvoke.FindContours(imagetest, contours, null, Emgu.CV.CvEnum.RetrType.External, Emgu.CV.CvEnum.ChainApproxMethod.ChainApproxNone);
    // Filter out near-zero-area contours before drawing.
    VectorOfVectorOfPoint use_contours = new VectorOfVectorOfPoint();
    for (int i = 0; i < contours.Size; i++)
    {
        // Take each connected contour individually.
        VectorOfPoint contour = contours[i];
        // Area of this connected contour.
        double area = CvInvoke.ContourArea(contour);
        // Area filter.
        if (area > 20)
        {
            // Keep the contour that passed the filter.
            use_contours.Push(contour);
        }
    }
    CvInvoke.DrawContours(image_contour, use_contours, -1, new MCvScalar(0, 255, 0), 1);
    int ksize = use_contours.Size;
    double[] m00 = new double[ksize];
    double[] m01 = new double[ksize];
    double[] m10 = new double[ksize];
    Point[] gravity = new Point[ksize]; // centroid of each contour
    MCvMoments[] moments = new MCvMoments[ksize];
    for (int i = 0; i < ksize; i++)
    {
        VectorOfPoint contour = use_contours[i];
        // Moments of the current contour (second arg true: binary-image mode).
        moments[i] = CvInvoke.Moments(contour, true);
        m00[i] = moments[i].M00;
        m01[i] = moments[i].M01;
        m10[i] = moments[i].M10;
        int x = Convert.ToInt32(m10[i] / m00[i]); // centroid x = M10/M00
        int y = Convert.ToInt32(m01[i] / m00[i]); // centroid y = M01/M00
        gravity[i] = new Point(x, y);
        //image_contour.Draw(new CircleF(gravity[i], 2), new Bgr(0, 255, 0), 2);
        // Label each contour with its index and centroid coordinates.
        image_contour.Draw(i.ToString(), new Point(gravity[i].X - 10, gravity[i].Y + 30), Emgu.CV.CvEnum.FontFace.HersheyComplexSmall, 1, new Bgr(0, 0, 255));
        image_contour.Draw((gravity[i].X).ToString() + "," + (gravity[i].Y).ToString(), gravity[i], Emgu.CV.CvEnum.FontFace.HersheySimplex, 1, new Bgr(0, 0, 255));
    }
    imageBox1.Image = image_contour;
}
protected PointF GetPositionFromBinary(Emgu.CV.Image <Emgu.CV.Structure.Gray, Byte> binaryImage, bool drawPositionInfo)
{
    // Locates the rat in a binary mask: picks the largest contour whose area
    // lies within [minObjectArea, maxObjectArea], takes its moment-based
    // centroid, and returns it normalized to roughly [-1, +1] with both axes
    // sign-flipped. If nothing qualifies, ratPosition stays IncorrectPosition.
    int maxNumberOfObects = 50; // give up when the mask is too fragmented
    int minObjectArea = 30 * 30;
    int maxObjectArea = (int)(binaryImage.Size.Width * binaryImage.Size.Height / 1.05);
    ratPosition = new IncorrectPosition(); // default: not found
    //Point positionNotFound = new Point(50000, 50000);
    //var positionNotFound = new PointF(float.NaN, float.NaN);
    Emgu.CV.Image <Emgu.CV.Structure.Gray, Byte> temporaryBinaryImage = binaryImage.Clone();
    Emgu.CV.Util.VectorOfVectorOfPoint contours = new Emgu.CV.Util.VectorOfVectorOfPoint();
    Emgu.CV.Mat hierarchy = new Mat();
    CvInvoke.FindContours(temporaryBinaryImage, contours, hierarchy, RetrType.Ccomp, ChainApproxMethod.ChainApproxSimple);
    if (contours.Size > 0)
    {
        int numberOfObjects = contours.Size;
        //var h = hierarchy.GetInputOutputArray();
        if (numberOfObjects < maxNumberOfObects)
        {
            float referenceArea = 0; // area of the best candidate so far
            //index = hierarchy.GetData(new int[] { index, 0 })[0]
            for (int index = 0; index < numberOfObjects; index++)
            {
                IInputArray contour = contours.GetInputArray().GetMat(index);
                MCvMoments moment = CvInvoke.Moments(contour);
                float area = (float)moment.M00; // zeroth moment = contour area
                // Keep the largest contour inside the allowed area band.
                if (area > minObjectArea && area < maxObjectArea && area > referenceArea)
                {
                    // Centroid = (M10/M00, M01/M00).
                    ratPosition = new CorrectPosition(
                        x: (float)moment.M10 / area,
                        y: (float)moment.M01 / area);
                    referenceArea = area;
                }
            }
        }
    }
    if (drawPositionInfo)
    {
        DrawPositionInfo();
    }
    // Normalize to camera space: 0 at the image centre, sign flipped on both axes.
    var positionToSend = new PointF
    (
        x: -2 * (ratPosition.Value.X - binaryImage.Size.Width / 2) / binaryImage.Size.Width,
        y: -2 * (ratPosition.Value.Y - binaryImage.Size.Height / 2) / binaryImage.Size.Height
    );
    return(positionToSend);
}
private void frameProcessor(object sender, EventArgs e)
{
    // Per-frame colour-blob detector: blur, convert to HSV, segment the hue
    // channel once per configured colour range, then outline, label and count
    // the blobs whose area falls inside a frame-relative band.
    // Retreive frame from webcam
    if (fromWebcam.Retrieve(currentVideoFrame, 0))
    {
        // Resize should preserve aspect ratio
        resizedCurrentVideoFrame = keepAspectRatioResize(currentVideoFrame, new Size(fromCamPictureBox.Width, fromCamPictureBox.Height));
        //CvInvoke.Blur(resizedCurrentVideoFrame, blurOriginalCurrentVideoFrame, new Size(3, 3), new Point(-1, -1));
        CvInvoke.MedianBlur(resizedCurrentVideoFrame, blurOriginalCurrentVideoFrame, 5);
        CvInvoke.CvtColor(blurOriginalCurrentVideoFrame, hsvVideoFrame, ColorConversion.Bgr2Hsv);
        Mat[] hsvImageChannels = hsvVideoFrame.Split(); // [0] = hue channel
        // Iterate over defined color range
        for (int colorRange = 0; colorRange < interestedColors.Count; colorRange++)
        {
            // Hue-only segmentation for this colour range.
            Mat segmentationResultMat = new Mat();
            CvInvoke.InRange(hsvImageChannels[0], new ScalarArray(interestedColors[colorRange].Min), new ScalarArray(interestedColors[colorRange].Max), segmentationResultMat);
            VectorOfVectorOfPoint currentColorContours = new VectorOfVectorOfPoint();
            CvInvoke.FindContours(segmentationResultMat, currentColorContours, null, Emgu.CV.CvEnum.RetrType.External, Emgu.CV.CvEnum.ChainApproxMethod.ChainApproxSimple);
            int currentColorBlob = 0;
            double sumContour = 0; // total matched area for this colour
            for (int contourNo = 0; contourNo < currentColorContours.Size; contourNo++)
            {
                double currentContourArea = CvInvoke.ContourArea(currentColorContours[contourNo]);
                // Accept blobs between 0.03% and 50% of the frame area.
                if (currentContourArea > resizedCurrentVideoFrame.Width * resizedCurrentVideoFrame.Height * 0.0003 && currentContourArea < resizedCurrentVideoFrame.Width * resizedCurrentVideoFrame.Height * 0.5)
                {
                    sumContour += currentContourArea;
                    Rectangle contourBBox = CvInvoke.BoundingRectangle(currentColorContours[contourNo]);
                    CvInvoke.DrawContours(resizedCurrentVideoFrame, currentColorContours, contourNo, new MCvScalar(0, 255, 0), 2);
                    // Label the blob at its moment-based centroid (M10/M00, M01/M00).
                    MCvMoments contourMoment = CvInvoke.Moments(currentColorContours[contourNo]);
                    Point weightedCentroid = new Point((int)(contourMoment.M10 / contourMoment.M00),
                                                       (int)(contourMoment.M01 / contourMoment.M00));
                    CvInvoke.PutText(resizedCurrentVideoFrame, (colorRange + 1).ToString(), weightedCentroid, FontFace.HersheyComplexSmall, 4.0, new Bgr(0, 0, 255).MCvScalar);
                    currentColorBlob++;
                }
            }
            // Use timer to update UI blob count
            blobCount[colorRange] = (sumContour / (currentVideoFrame.Rows * currentVideoFrame.Cols));
        }
        fromCamPictureBox.Image = resizedCurrentVideoFrame.Bitmap;
    }
    else
    {
        //No Frame Retreive, reopen camera => Something may be happen (VirtualBox bug)
        System.Console.WriteLine("No Frame");
    }
}
//public static Contour<Point> CopyWithTransform(this Contour<Point> sourceContour, Func<Point, Point> transform)
//{
//    MemStorage stor = new MemStorage();
//    Contour<Point> cpContour = new Contour<Point>(stor);
//    List<Point> lPointsToPush = new List<Point>();
//    foreach (Point pt in sourceContour) lPointsToPush.Add(transform(pt));
//    cpContour.PushMulti(lPointsToPush.ToArray(), Emgu.CV.CvEnum.BACK_OR_FRONT.BACK);
//    Contour<Point> copyContourTransformed = new Contour<Point>(0, stor);
//    return copyContourTransformed;
//}

//public static Contour<Point> Copy(this Contour<Point> sourceContour)
//{
//    Contour<Point> copyContour = new Contour<Point>(new MemStorage());
//    copyContour.PushMulti(sourceContour.ToArray(), Emgu.CV.CvEnum.BACK_OR_FRONT.BACK);
//    return copyContour;
//}

/// <summary>
/// Centre of mass of a contour, computed by rasterizing the filled contour
/// into a temporary binary image and taking the binary-image moments.
/// </summary>
/// <param name="theContour">The contour whose mass centre is wanted.</param>
/// <returns>The centroid (m10/m00, m01/m00) in image coordinates.</returns>
public static PointD MassCenter(this Contour <Point> theContour)
{
    //Rectangle rectContourBounding = CvInvoke.BoundingRectangle(theContour);
    Rectangle rectContourBounding = theContour.BoundingRectangle; // CvInvoke.BoundingRectangle(theContour);
    // Image sized to Right/Bottom so the contour fits without translation.
    Image <Gray, byte> tmpImg = new Image <Gray, byte>(rectContourBounding.Right, rectContourBounding.Bottom);
    //tmpImg.Draw(theContour.ToArray(), white, -1);
    tmpImg.Draw(theContour, white, -1); // fill the contour in white
    MCvMoments moments = tmpImg.GetMoments(true); // true: treat image as binary
    double cx = moments.m10 / moments.m00;
    double cy = moments.m01 / moments.m00;
    return(new PointD(cx, cy));
}
private double area_check(IImage bw)
{
    // Legacy OpenCV 1.x moments of the binary image `bw` (third argument 1:
    // treat the image as binary). Stores the blob centroid into the corX/corY
    // fields and returns the blob area.
    MCvMoments moments = new MCvMoments();
    CvInvoke.cvMoments(bw.Ptr, ref moments, 1);

    double m10 = CvInvoke.cvGetSpatialMoment(ref moments, 1, 0);
    double m01 = CvInvoke.cvGetSpatialMoment(ref moments, 0, 1);
    // Central moment (0,0) equals the spatial moment m00, i.e. the area.
    double area = CvInvoke.cvGetCentralMoment(ref moments, 0, 0);

    // Centroid = (m10/area, m01/area).
    corX = (int)(m10 / area);
    corY = (int)(m01 / area);
    return area;
}
void Hu(Image <Gray, byte> img, ref Double[] MomentHu)
{
    // Writes the first six Hu invariants of the first contour of the
    // binarized image (threshold at gray 60) into MomentHu[0..5].
    Image<Gray, byte> binary = new Image<Gray, byte>(img.Size);
    binary = img.ThresholdBinary(new Gray(60), new Gray(255));
    Contour<Point> firstContour = binary.FindContours();
    MCvHuMoments hu = firstContour.GetMoments().GetHuMoment();

    MomentHu[0] = hu.hu1;
    MomentHu[1] = hu.hu2;
    MomentHu[2] = hu.hu3;
    MomentHu[3] = hu.hu4;
    MomentHu[4] = hu.hu5;
    MomentHu[5] = hu.hu6;
}
public void GetHuMoments(Image <Gray, Byte> img, double[] m_hu)
{
    // Legacy OpenCV 1.x API: compute raw moments of the image (third argument
    // 0 = non-binary), then derive the seven Hu invariants into m_hu[0..6].
    MCvMoments rawMoments = new MCvMoments();
    CvInvoke.cvMoments(img, ref rawMoments, 0);

    MCvHuMoments hu = new MCvHuMoments();
    CvInvoke.cvGetHuMoments(ref rawMoments, ref hu);

    m_hu[0] = hu.hu1;
    m_hu[1] = hu.hu2;
    m_hu[2] = hu.hu3;
    m_hu[3] = hu.hu4;
    m_hu[4] = hu.hu5;
    m_hu[5] = hu.hu6;
    m_hu[6] = hu.hu7;
}
// get the Hu
public static bool IsGetShapeFeature(string ImgPath, ref MCvHuMoments huMoments)
{
    // Loads the image as grayscale and, on success, fills `huMoments` with the
    // seven Hu invariants of its raw (non-binary) moments.
    MCvMoments moments = new MCvMoments();
    IntPtr img = CvInvoke.cvLoadImage(ImgPath, LOAD_IMAGE_TYPE.CV_LOAD_IMAGE_GRAYSCALE);
    if (img == IntPtr.Zero)
    {
        // File missing or unreadable.
        return false;
    }

    CvInvoke.cvMoments(img, ref moments, 0);
    CvInvoke.cvGetHuMoments(ref moments, ref huMoments);
    // The native image must be released explicitly.
    CvInvoke.cvReleaseImage(ref img);
    return true;
}
//finds the centre of the rectangle in question - only called if a rectangle has been found i.e. pts0-4 exist
private Point getCentre(VectorOfPoint contour)
{
    // Centroid from the contour's image moments:
    //   x = M10/M00, y = M01/M00.
    MCvMoments contourMoments = CvInvoke.Moments(contour);
    return new Point(
        Convert.ToInt32(contourMoments.M10 / contourMoments.M00),
        Convert.ToInt32(contourMoments.M01 / contourMoments.M00));
}
private void btnDetectObjects_Click(object sender, EventArgs e)
{
    // Finds shapes of the selected colour, classifies each contour by its
    // approximated vertex count, and outlines/labels those matching the
    // shape chosen in the UI.
    var tmpImage = image.Copy();
    Image<Gray, byte> grayImage = tmpImage.InRange(new Bgr(color), new Bgr(color));

    VectorOfVectorOfPoint shapes = new VectorOfVectorOfPoint();
    CvInvoke.FindContours(grayImage, shapes, new Mat(), RetrType.External, ChainApproxMethod.ChainApproxSimple);

    for (int i = 0; i < shapes.Size; i++)
    {
        double shapePerimiter = CvInvoke.ArcLength(shapes[i], true);
        if (shapePerimiter < size)
        {
            continue; // too small to be classified
        }

        // Polygonal approximation; the vertex count decides the shape class.
        VectorOfPoint approx = new VectorOfPoint();
        CvInvoke.ApproxPolyDP(shapes[i], approx, shapePerimiter * 0.03, true);

        // Label anchor: contour centroid (M10/M00, M01/M00).
        MCvMoments moments = CvInvoke.Moments(shapes[i]);
        Point shapeCenter = new Point((int)(moments.M10 / moments.M00), (int)(moments.M01 / moments.M00));

        MCvScalar textColor = new MCvScalar(0, 0, 0);
        MCvScalar borderColor = new MCvScalar(0, 0, 0);

        // Decide which label (if any) applies to this contour.
        string label = null;
        if (shape.Equals("Pravougaonik") && approx.Size == 4)
        {
            label = "Pravougaonik";
        }
        else if (shape.Equals("Trougao") && approx.Size == 3)
        {
            label = "Trougao";
        }
        else if (shape.Equals("Krug") && approx.Size > 4)
        {
            label = "Krug";
        }

        if (label != null)
        {
            CvInvoke.DrawContours(tmpImage, shapes, i, borderColor, 3);
            CvInvoke.PutText(tmpImage, label, shapeCenter, FontFace.HersheyTriplex, 0.6, textColor, 2);
        }
    }

    pictureBox.Image = GetBitmapWithFiltersApplied(tmpImage.Bitmap);
}
void Moments(Image <Gray, byte> img, ref Double[] MomentSp, ref Double[] MomentCen, ref Double[] MomentNor)
{
    // Fills the three output arrays with the spatial, central and normalized
    // central moments (all orders with xOrder + yOrder <= 3) of the first
    // contour found in a binarized copy of `img` (threshold at gray 60).
    Image <Gray, byte> imgA = new Image <Gray, byte>(img.Size);
    imgA = img.ThresholdBinary(new Gray(60), new Gray(255));
    // Legacy Emgu 2.x contour API; FindContours returns the head of the list.
    Contour <Point> contourA = imgA.FindContours();
    MCvMoments momentsA = contourA.GetMoments();
    for (int xOrder = 0; xOrder <= 3; xOrder++)
    {
        for (int yOrder = 0; yOrder <= 3; yOrder++)
        {
            if (xOrder + yOrder <= 3)
            {
                // NOTE(review): the index 3*xOrder + yOrder collides for
                // (xOrder=0, yOrder=3) and (xOrder=1, yOrder=0) — both map to
                // slot 3, so moment (0,3) is overwritten by (1,0). Confirm
                // whether callers rely on this layout before changing it.
                MomentSp[3 * xOrder + yOrder] = momentsA.GetSpatialMoment(xOrder, yOrder);
                MomentCen[3 * xOrder + yOrder] = momentsA.GetCentralMoment(xOrder, yOrder);
                MomentNor[3 * xOrder + yOrder] = momentsA.GetNormalizedCentralMoment(xOrder, yOrder);
            }
        }
    }
}
private void mechanika()
{
    // Copies the processed frame into the display image and computes the
    // centre of gravity of its grayscale version via binary-image moments.
    image_PB4.Data = image_PB2.Data;
    Image<Gray, byte> image_mech = image_PB4.Convert<Gray, byte>();

    // Second argument true: treat non-zero pixels as 1 (binary-image moments).
    MCvMoments m = CvInvoke.Moments(image_mech, true);
    srodek_ciezkosci.X = (int)(m.M10 / m.M00);
    srodek_ciezkosci.Y = (int)(m.M01 / m.M00);

    System.Diagnostics.Debug.WriteLine("srodek_ciezkosci " + srodek_ciezkosci.ToString());
    System.Diagnostics.Debug.WriteLine("srodek_ciezkosci " + srodek_ciezkosci.X);
    System.Diagnostics.Debug.WriteLine("srodek_ciezkosci " + srodek_ciezkosci.Y);

    // A centre is considered found only when both coordinates are positive.
    srodek = srodek_ciezkosci.X > 0 && srodek_ciezkosci.Y > 0;
}
public Point FindLaser(Image <Gray, Byte> src)
{
    // Centroid of the bright blob in `src`, computed with the legacy
    // OpenCV 1.x moments API (third argument 1: treat the image as binary).
    MCvMoments moments = new MCvMoments();
    CvInvoke.cvMoments(src, ref moments, 1);

    // First-order spatial moments and the area (central moment (0,0) == m00).
    double moment10 = CvInvoke.cvGetSpatialMoment(ref moments, 1, 0);
    double moment01 = CvInvoke.cvGetSpatialMoment(ref moments, 0, 1);
    double area = CvInvoke.cvGetCentralMoment(ref moments, 0, 0);

    // Laser position = centroid = (m10/area, m01/area).
    Point laser = new Point((int)(moment10 / area), (int)(moment01 / area));
    return laser;
}
public static double CircularRatioVA(VectorOfPoint contour)
{
    // Circularity measure: the coefficient of variation (sigma/mean) of the
    // distances from every contour point to the contour's centroid.
    // A perfect circle gives 0; larger values mean less circular.
    MCvMoments moments = CvInvoke.Moments(contour);
    Point centroid = new Point((int)(moments.M10 / moments.M00), (int)(moments.M01 / moments.M00));

    List<double> distances = new List<double>();
    for (int i = 0; i < contour.Size; i++)
    {
        distances.Add(Geometry.Distance(contour[i], centroid));
    }

    double mean = Mean(distances);
    double sigma = Sigma(distances, mean);
    return sigma / mean;
}