/// <summary>
/// Rebuilds <c>blobParams</c> and <c>blobDetector</c> from the values currently
/// entered in the parameter text boxes. Only area and color filtering are
/// enabled; circularity, convexity and inertia filters are switched off (their
/// Min* thresholds are still stored so they take effect if re-enabled).
/// </summary>
private void UpdateBlobParams(object sender, RoutedEventArgs e)
{
    // Parse every field up front so malformed user input cannot crash the UI
    // thread with a FormatException; on any invalid value the existing
    // detector is left unchanged.
    if (!float.TryParse(MinTh.Text, out float minThreshold) ||
        !float.TryParse(MinArea.Text, out float minArea) ||
        !float.TryParse(MinCirc.Text, out float minCircularity) ||
        !float.TryParse(MinConv.Text, out float minConvexity) ||
        !float.TryParse(MinDist.Text, out float minDistBetweenBlobs) ||
        !float.TryParse(MinInertia.Text, out float minInertiaRatio))
    {
        return;
    }

    blobParams = new SimpleBlobDetectorParams();
    blobParams.FilterByArea = true;
    blobParams.FilterByColor = true;
    blobParams.FilterByCircularity = false;
    blobParams.FilterByConvexity = false;
    blobParams.FilterByInertia = false;
    blobParams.blobColor = (byte)255; // detect bright blobs on dark background
    blobParams.MinThreshold = minThreshold;
    // The text box holds a linear size which is squared into an area here —
    // presumably the user enters a side length. TODO confirm intent.
    blobParams.MinArea = minArea * minArea;
    blobParams.MinCircularity = minCircularity;
    blobParams.MinConvexity = minConvexity;
    blobParams.MinDistBetweenBlobs = minDistBetweenBlobs;
    blobParams.MinInertiaRatio = minInertiaRatio;

    blobDetector = new SimpleBlobDetector(blobParams);
}
/// <summary>
/// Runs simple blob detection on <paramref name="pixbuf"/> (when one is
/// supplied) using an area-only filter built from the class-level
/// MinArea/MaxArea values, delegating the actual detection to
/// <c>cv.SimpleBlobDetectionMat</c>.
/// </summary>
public static void Simple(OpenCV cv, Pixbuf pixbuf, Select selection, double ScaleX, double ScaleY)
{
    // Nothing to detect without an image.
    if (pixbuf == null)
    {
        return;
    }

    var detectorParams = new SimpleBlobDetectorParams();
    detectorParams.FilterByArea = true;
    detectorParams.MinArea = MinArea;
    detectorParams.MaxArea = MaxArea;
    cv.InitSimpleBlobDetector(detectorParams);

    using (var frame = cv.ToMat(pixbuf))
    {
        cv.SimpleBlobDetectionMat(frame, selection, ScaleX, ScaleY);
    }
}
/// <summary>
/// Loads "fruits.jpg", isolates orange-colored regions with an HSV range
/// threshold, cleans the mask with a morphological close, then detects blobs
/// in the configured size range and draws them on the original image.
/// </summary>
static void Main(string[] args)
{
    String win1 = "Orange Detector"; // the name of the window
    CvInvoke.NamedWindow(win1);      // create the window using the specific name

    // HSV bounds for "orange" (hue ~10-18 on OpenCV's 0-179 hue scale).
    MCvScalar orangeMin = new MCvScalar(10, 211, 140);
    MCvScalar orangeMax = new MCvScalar(18, 255, 255);

    // using blocks release the native Mat/detector memory deterministically;
    // the original leaked all of these.
    using (Mat img = new Mat("fruits.jpg", ImreadModes.AnyColor))
    using (Mat hsvImg = new Mat())
    using (Mat kernel = new Mat()) // empty kernel => default 3x3 structuring element
    {
        CvInvoke.CvtColor(img, hsvImg, ColorConversion.Bgr2Hsv);
        // Binary mask: 255 where the pixel falls inside the orange range.
        CvInvoke.InRange(hsvImg, new ScalarArray(orangeMin), new ScalarArray(orangeMax), hsvImg);
        // Morphological close (5 iterations) fills small holes in the mask.
        CvInvoke.MorphologyEx(hsvImg, hsvImg, MorphOp.Close, kernel, new Point(-1, -1), 5, BorderType.Default, new MCvScalar());

        SimpleBlobDetectorParams param = new SimpleBlobDetectorParams();
        // Made explicit: Min/MaxArea below only apply with the area filter on
        // (the original relied on the default).
        param.FilterByArea = true;
        param.FilterByCircularity = false;
        param.FilterByConvexity = false;
        param.FilterByInertia = false;
        param.FilterByColor = false;
        param.MinArea = 1000;
        param.MaxArea = 50000;

        using (SimpleBlobDetector detector = new SimpleBlobDetector(param))
        {
            MKeyPoint[] keypoints = detector.Detect(hsvImg);
            using (VectorOfKeyPoint keypointVector = new VectorOfKeyPoint(keypoints))
            {
                Features2DToolbox.DrawKeypoints(img, keypointVector, img, new Bgr(255, 0, 0), Features2DToolbox.KeypointDrawType.DrawRichKeypoints);
            }
        }

        CvInvoke.Imshow(win1, img); // show image
        CvInvoke.WaitKey(0);        // wait for key press before executing next line
        CvInvoke.DestroyWindow(win1);
    }
}
/// <summary>
/// Continuously captures webcam frames, locates the paper workspace (largest
/// external contour), warps it to a top-down view, detects square and
/// triangle blobs on it, publishes the shapes (sorted by Y) to the global
/// <c>shapes</c> array via the UI thread, and displays the intermediate images.
/// Runs until the capture device closes.
/// </summary>
private void GenerateWorkspace()
{
    //========== Objects and Variables ==========
    Mat captureFrame;
    Image<Gray, byte> processFrame;
    Image<Gray, byte> workspaceFrame;
    MKeyPoint[] squareBlobs;
    MKeyPoint[] triBlobs;
    VectorOfVectorOfPoint processFrameContours = new VectorOfVectorOfPoint();
    byte processFrameThreshold = 150;

    //========== Square and Triangle Blob Detector Config ==========
    // Both detectors filter by area only. The circularity bounds set below
    // are inert while FilterByCircularity is false; they are kept so the
    // intended thresholds are recorded should the filter be re-enabled.
    SimpleBlobDetectorParams squareBlobDetectorParams = new SimpleBlobDetectorParams();
    SimpleBlobDetectorParams triBlobDetectorParams = new SimpleBlobDetectorParams();

    squareBlobDetectorParams.FilterByArea = true;
    squareBlobDetectorParams.FilterByCircularity = false;
    squareBlobDetectorParams.FilterByColor = false;
    squareBlobDetectorParams.FilterByInertia = false;
    squareBlobDetectorParams.FilterByConvexity = false;
    squareBlobDetectorParams.MinArea = 10000;
    squareBlobDetectorParams.MaxArea = 100000;
    squareBlobDetectorParams.MaxCircularity = 1;
    squareBlobDetectorParams.MinCircularity = 0.67f;
    squareBlobDetectorParams.blobColor = 255;
    SimpleBlobDetector squareBlobDetector = new SimpleBlobDetector(squareBlobDetectorParams);

    triBlobDetectorParams.FilterByArea = true;
    triBlobDetectorParams.FilterByCircularity = false;
    triBlobDetectorParams.FilterByColor = false;
    triBlobDetectorParams.FilterByInertia = false;
    triBlobDetectorParams.FilterByConvexity = false;
    triBlobDetectorParams.MinArea = 2000;
    triBlobDetectorParams.MaxArea = 9999; // triangle area range abuts the square minimum
    triBlobDetectorParams.MaxCircularity = 0.66f;
    triBlobDetectorParams.MinCircularity = 0.01f;
    triBlobDetectorParams.blobColor = 255;
    SimpleBlobDetector triBlobDetector = new SimpleBlobDetector(triBlobDetectorParams);

    //========== Begin Shape Detection Algorithm ==========
    while (_capture.IsOpened)
    {
        //==== Pull Image from the Webcam ====
        captureFrame = _capture.QueryFrame();

        //==== Scrub Captured Frame ====
        // Optimize the image for paper edge detection: work in HSV, boost
        // contrast in the value channel and suppress saturated (colored)
        // pixels so the white paper dominates.
        Image<Bgr, byte> processFrameBGR = captureFrame.ToImage<Bgr, byte>();
        Image<Hsv, byte> processFrameHSV = processFrameBGR.Convert<Hsv, byte>();
        processFrameHSV = processFrameHSV.SmoothMedian(9);
        Image<Gray, byte>[] processFrameHSVChannels = processFrameHSV.Split();
        processFrameHSVChannels[2]._EqualizeHist();
        // Darken any colors in the image: subtract a brightness-scaled
        // portion of the saturation channel from the value channel.
        processFrameHSVChannels[2] -= processFrameHSVChannels[1] * 1.5 * (processFrameHSVChannels[2].GetAverage().Intensity / 255.0);
        processFrame = processFrameHSVChannels[2];

        // Convert to a binary image.
        processFrame._ThresholdBinary(new Gray(processFrameThreshold), new Gray(255));

        //==== Detect Paper Border ====
        CvInvoke.FindContours(processFrame, processFrameContours, null, Emgu.CV.CvEnum.RetrType.External, Emgu.CV.CvEnum.ChainApproxMethod.ChainApproxSimple);
        int largestContourIndex = 0;
        double largestContour = 0;
        double currentContour = 0;
        for (int i = 0; i < processFrameContours.Size; i++)
        {
            currentContour = CvInvoke.ContourArea(processFrameContours[i], false);
            if (currentContour > largestContour)
            {
                largestContour = currentContour;
                largestContourIndex = i;
            }
        }
        CvInvoke.DrawContours(captureFrame, processFrameContours, largestContourIndex, new MCvScalar(255, 0, 0), 3);

        // Approximate the largest contour with a polygon; a clean sheet of
        // paper should reduce to exactly four corners.
        VectorOfPoint contourPoints = new VectorOfPoint();
        CvInvoke.ApproxPolyDP(processFrameContours[largestContourIndex], contourPoints, CvInvoke.ArcLength(processFrameContours[largestContourIndex], true) * 0.06, true);
        Point[] workspaceCorners = contourPoints.ToArray();

        if (workspaceCorners.Length == 4)
        {
            // Rotate the corner order by two positions so index 0 matches
            // the orientation the downstream warp expects.
            Point[] temp = new Point[workspaceCorners.Length];
            temp[0] = workspaceCorners[2];
            temp[1] = workspaceCorners[3];
            temp[2] = workspaceCorners[0];
            temp[3] = workspaceCorners[1];
            workspaceCorners = temp;

            // Averages the paper corners over the last five frames (jitter reduction).
            workspaceCorners = workspaceCornersAverage(workspaceCorners);

            for (int i = 0; i < workspaceCorners.Length; i++)
            {
                CvInvoke.Circle(captureFrame, workspaceCorners[i], 6, new MCvScalar(0, 255, 0), -1);
                CvInvoke.PutText(captureFrame, i.ToString(), workspaceCorners[i], Emgu.CV.CvEnum.FontFace.HersheyPlain, 3, new MCvScalar(0, 0, 255), 2);
            }

            //==== Warp Paper Perspective ====
            workspaceFrame = processFrame;
            // Order corners top-to-bottom then left-to-right so they pair
            // with the destination rectangle corners below.
            IEnumerable<Point> query = workspaceCorners.OrderBy(point => point.Y).ThenBy(point => point.X);
            PointF[] ptsSrc = new PointF[4];
            PointF[] ptsDst = new PointF[]
            {
                new PointF(0, 0),
                new PointF(workspaceFrame.Width - 1, 0),
                new PointF(0, workspaceFrame.Height - 1),
                new PointF(workspaceFrame.Width - 1, workspaceFrame.Height - 1)
            };
            for (int i = 0; i < 4; i++)
            {
                ptsSrc[i] = new PointF(query.ElementAt(i).X, query.ElementAt(i).Y);
            }
            using (var matrix = CvInvoke.GetPerspectiveTransform(ptsSrc, ptsDst))
            {
                using (var cutImagePortion = new Mat())
                {
                    CvInvoke.WarpPerspective(workspaceFrame, cutImagePortion, matrix, new Size(workspaceFrame.Width, workspaceFrame.Height), Inter.Cubic);
                    // The double flip is equivalent to a 180-degree rotation.
                    workspaceFrame = cutImagePortion.ToImage<Gray, Byte>().Flip(FlipType.Vertical).Flip(FlipType.Horizontal);
                }
            }

            //==== Detect Blobs on Warped Image ====
            squareBlobs = squareBlobDetector.Detect(workspaceFrame);
            triBlobs = triBlobDetector.Detect(workspaceFrame);

            //==== Transfer Blobs To Shape Object Array ====
            // Positions are scaled into an 85 x 110 workspace coordinate system.
            // NOTE(review): assumes Shape is a struct, so array elements are
            // usable without construction — confirm; a class would throw NRE here.
            Shape[] foundShapes = new Shape[squareBlobs.Length + triBlobs.Length];
            for (int i = 0; i < squareBlobs.Length; i++)
            {
                foundShapes[i].position.X = (85 * squareBlobs[i].Point.X / workspaceFrame.Width);
                foundShapes[i].position.Y = (110 * squareBlobs[i].Point.Y / workspaceFrame.Height);
                foundShapes[i].type = (int)Shape.Type.Square;
                Point Keypoint = new Point((int)squareBlobs[i].Point.X, (int)squareBlobs[i].Point.Y);
                CvInvoke.Circle(workspaceFrame, Keypoint, 6, new MCvScalar(150, 150, 0), -1);
                CvInvoke.PutText(workspaceFrame, "Sq", Keypoint, Emgu.CV.CvEnum.FontFace.HersheyPlain, 2, new MCvScalar(150, 150, 0), 2);
            }
            for (int i = 0; i < triBlobs.Length; i++)
            {
                foundShapes[i + squareBlobs.Length].position.X = (85 * triBlobs[i].Point.X / workspaceFrame.Width);
                // BUG FIX: the original re-assigned position.X from Point.X
                // scaled by Height, so triangles never received a Y
                // coordinate and their X was clobbered. Mirror the square
                // loop: set position.Y from Point.Y.
                foundShapes[i + squareBlobs.Length].position.Y = (110 * triBlobs[i].Point.Y / workspaceFrame.Height);
                foundShapes[i + squareBlobs.Length].type = (int)Shape.Type.Triangle;
                Point Keypoint = new Point((int)triBlobs[i].Point.X, (int)triBlobs[i].Point.Y);
                CvInvoke.Circle(workspaceFrame, Keypoint, 6, new MCvScalar(150, 150, 0), -1);
                CvInvoke.PutText(workspaceFrame, "Tri", Keypoint, Emgu.CV.CvEnum.FontFace.HersheyPlain, 2, new MCvScalar(150, 150, 0), 2);
            }

            //==== Sort Shapes by Order (Front to Back on Paper) ====
            IEnumerable<Shape> shapee = foundShapes.OrderBy(position => position.position.Y);
            foundShapes = shapee.ToArray();
            String output = "Shapes: ";
            for (int i = 0; i < foundShapes.Length; i++)
            {
                if (foundShapes[i].type == (int)Shape.Type.Triangle)
                {
                    output += "T";
                }
                else
                {
                    output += "S";
                }
                output += foundShapes[i].position.Y;
                output += " ";
            }

            // Publish the result to the global shape array on the UI thread.
            Invoke(new Action(() =>
            {
                shapes = foundShapes; // this is the global shape array
            }));

            //==== Display the Important Images ====
            DisplayFrames(captureFrame, processFrame, workspaceFrame);
        }
        else
        {
            // Paper not found this frame: show the raw processing output instead.
            DisplayFrames(captureFrame, processFrame, processFrame);
        }
    }
}