/// <summary>
/// Draws the detected face, eye and mouth regions onto a clone of the current
/// frame, shows it in the preview box and refreshes the difference views.
/// </summary>
/// <param name="data">Detection results for the current frame.</param>
private void DrawImage(FrameData data)
{
    // Clone so the overlays never touch the original captured frame.
    Emgu.CV.Image<Bgr, byte> toDraw = data.Frame.Clone();
    toDraw.Draw(data.Face.rect, new Bgr(Color.Yellow), 1);

    if (data.Eyes != null)
    {
        foreach (var eye in data.Eyes)
        {
            // Eye rectangles are relative to the eye ROI; shift into frame coordinates.
            Rectangle eyeRect = eye.rect;
            eyeRect.Offset(data.EyesROI.X, data.EyesROI.Y);
            toDraw.Draw(eyeRect, new Bgr(Color.White), 2);
        }
    }

    // BUGFIX: the original compared data.Mouth.rect (a value-type Rectangle)
    // against null, which is always true (compiler warning CS0472). An empty
    // rectangle is the actual "no mouth detected" marker, so the size check
    // below is the only guard needed.
    if (data.Mouth.rect.Height != 0 && data.Mouth.rect.Width != 0)
    {
        // Mouth rectangle is relative to the mouth ROI; shift into frame coordinates.
        Rectangle mouthRect = data.Mouth.rect;
        mouthRect.Offset(data.MouthROI.X, data.MouthROI.Y);
        toDraw.Draw(mouthRect, new Bgr(Color.White), 2);
    }

    frame.Image = toDraw;
    DrawDiff(data);
    DrawEyesDifference(data);
    // Remember this frame's results for the next comparison pass.
    lastData = data;
}
/// <summary>
/// Draws two demo ellipses centered at (100, 100): one rotated by 110 degrees
/// in red and one axis-aligned in green.
/// </summary>
/// <param name="image">Image to draw onto.</param>
public void ProcessImage(Emgu.CV.Image<Emgu.CV.Structure.Bgr, byte> image)
{
    System.Drawing.PointF center = new System.Drawing.PointF(100, 100);
    System.Drawing.Size extent = new System.Drawing.Size(50, 30);

    // Same box, two rotations: 110 degrees and unrotated.
    MCvBox2D rotatedBox = new MCvBox2D(center, extent, 110);
    MCvBox2D uprightBox = new MCvBox2D(center, extent, 0);

    image.Draw(new Ellipse(rotatedBox), new Bgr(0, 0, 255), 2);
    image.Draw(new Ellipse(uprightBox), new Bgr(0, 255, 0), 2);
}
/// <summary>
/// Overlays the best detections — eye pair, left eye and right eye — on the
/// image, each outlined 2 px thick in its configured colour.
/// </summary>
/// <param name="image">Image to draw onto.</param>
public void DrawObj(Emgu.CV.Image<Emgu.CV.Structure.Bgr, byte> image)
{
    Bgr pairColor = new Bgr(m_ObjCor);
    Bgr leftColor = new Bgr(m_LeftEyeCor);
    Bgr rightColor = new Bgr(m_RightEyeCor);

    image.Draw(m_rcBestEyeDetected, pairColor, 2);
    image.Draw(m_rcBestLeftEye, leftColor, 2);
    image.Draw(m_rcBestRightEye, rightColor, 2);
}
/// <summary>
/// Per-frame callback: outlines the most recent ROI selection (if any) in green.
/// </summary>
/// <param name="fp">Frame grabber that produced the frame.</param>
/// <param name="img">Frame to draw onto.</param>
protected override void OnFrame(Parsley.Core.BuildingBlocks.FrameGrabber fp, Emgu.CV.Image<Emgu.CV.Structure.Bgr, byte> img)
{
    Rectangle roi = Context.ROIHandler.Last;
    if (roi == Rectangle.Empty)
    {
        // Nothing selected yet — leave the frame untouched.
        return;
    }
    img.Draw(roi, new Emgu.CV.Structure.Bgr(Color.Green), 1);
}
/// <summary>
/// Removes straight lines from a (binary-like) image: runs Canny + probabilistic
/// Hough to find line segments, rasterizes them into a mask, and ANDs the
/// inverted image against the inverted mask so the lines are erased.
/// Intermediate stages are saved via guardarArchivo for debugging.
/// </summary>
/// <param name="img">Input image; not modified (a clone is processed).</param>
/// <returns>A new Mat with the detected lines removed.</returns>
public Emgu.CV.Mat detectarLineas(Mat img)
{
    // Thresholds are derived from the configurable field 'umbral'.
    double cannyThreshold = umbral;
    double cannyThresholdLinking = umbral - 60;

    Emgu.CV.Mat uimage = img.Clone();
    Emgu.CV.Mat imgret = new Emgu.CV.Mat();
    Emgu.CV.Mat cannyEdges = new Emgu.CV.Mat();
    Emgu.CV.CvInvoke.Canny(uimage, cannyEdges, cannyThreshold, cannyThresholdLinking);

    Emgu.CV.Structure.LineSegment2D[] lines = Emgu.CV.CvInvoke.HoughLinesP(
        cannyEdges,
        1,                  // distance resolution in pixel-related units
        Math.PI / 180.0,    // angle resolution measured in radians
        255 - umbral,       // accumulator threshold
        30,                 // minimum line length
        5);                 // maximum gap between collinear segments

    // Rasterize every detected segment into a mask image.
    Emgu.CV.Image<Emgu.CV.Structure.Gray, Byte> lineImage =
        new Emgu.CV.Image<Emgu.CV.Structure.Gray, Byte>(img.Width, img.Height);
    foreach (Emgu.CV.Structure.LineSegment2D line in lines)
    {
        lineImage.Draw(line, new Gray(255), 2);
    }

    // Invert both so the AND keeps everything EXCEPT the detected lines,
    // then invert back to the original polarity.
    // (Removed an unused 'imgInv' Mat the original allocated and never used.)
    Emgu.CV.CvInvoke.BitwiseNot(uimage, uimage);
    Emgu.CV.CvInvoke.BitwiseNot(lineImage, lineImage);
    guardarArchivo(lineImage.Mat, "3imagenmascara");
    guardarArchivo(uimage, "4imagenLineas");
    Emgu.CV.CvInvoke.BitwiseAnd(uimage, lineImage, imgret);
    Emgu.CV.CvInvoke.BitwiseNot(imgret, imgret);
    guardarArchivo(imgret, "5imagensinLineas");
    return (imgret);
}
/// <summary>
/// Stamps a marker (roughly 10% of the image size) in the top-left corner
/// encoding the recognition result: a white rectangle for FOUND_OK, a white
/// circle for FOUND_WRONG, and a white cross for NOT_FOUND. Unknown result
/// strings leave the image untouched.
/// </summary>
/// <param name="imageType">Result tag: "FOUND_OK", "FOUND_WRONG" or "NOT_FOUND".</param>
/// <param name="image">Grayscale image to stamp (modified in place).</param>
private void tagImageWithResult(string imageType, Emgu.CV.Image<Emgu.CV.Structure.Gray, byte> image)
{
    // The branches are mutually exclusive, so chain them instead of testing all
    // three; ordinal comparison replaces culture-sensitive CompareTo.
    if (string.Equals(imageType, "FOUND_OK", StringComparison.Ordinal))
    {
        // White filled rectangle, 10% of image size.
        image.Draw(new System.Drawing.Rectangle(0, 0, image.Width / 10, image.Height / 10), GRAY_WHITE, 0);
    }
    else if (string.Equals(imageType, "FOUND_WRONG", StringComparison.Ordinal))
    {
        // White filled circle, radius 10% of image width.
        // (The original comment wrongly said "white square".)
        image.Draw(new Emgu.CV.Structure.CircleF(new System.Drawing.PointF(0, 0), image.Width / 10), GRAY_WHITE, 0);
    }
    else if (string.Equals(imageType, "NOT_FOUND", StringComparison.Ordinal))
    {
        // White cross centered in the top-left 10% region.
        // (The original comment wrongly said "white square".)
        image.Draw(new Emgu.CV.Structure.Cross2DF(new System.Drawing.PointF(image.Width / 20, image.Height / 20), image.Width / 10, image.Height / 10), GRAY_WHITE, 0);
    }
}
/// <summary>
/// Overlays the best detection result on the image, outlined 2 px thick in the
/// configured object colour.
/// </summary>
/// <param name="image">Image to draw onto.</param>
public void DrawObj(Emgu.CV.Image<Emgu.CV.Structure.Bgr, byte> image)
{
    // Removed a commented-out per-detection loop that drew every mouth
    // candidate; only the single best result is visualized.
    image.Draw(BestResult, new Bgr(m_ObjCor), 2);
}
/// <summary>
/// Handles the server's image request: grabs a frame from the capture device,
/// stamps it with the current local timestamp and hands the bitmap back
/// through the event args. When EmguCV support is compiled out
/// (NOT_USE_EMGUCV), falls back to GetImage() instead.
/// </summary>
/// <param name="sender">Event source (unused).</param>
/// <param name="e">Message whose Image property receives the captured bitmap.</param>
private void server_GetImage(object sender, MessageImg e)
{
#if USE_EMGUCV
    // NOTE(review): QueryFrame can presumably return null if the device is
    // unavailable — confirm against the capture setup.
    Emgu.CV.Image<Emgu.CV.Structure.Bgr, byte> image = this.capture.QueryFrame();
    // Millisecond-precision local timestamp; invariant format info keeps the
    // string stable across machine locales.
    string dateAndTime = DateTime.Now.ToString("yyyy.MM.dd/HH:mm:ss.fff", System.Globalization.DateTimeFormatInfo.InvariantInfo);
    // 'f' is a font field declared elsewhere in the class, passed by ref per
    // the Emgu draw-text API; text is drawn in green at (10, 30).
    image.Draw(dateAndTime, ref f, new Point(10, 30), new Emgu.CV.Structure.Bgr(0, 255, 0));
    e.Image = image.Bitmap;
#elif NOT_USE_EMGUCV
    e.Image = GetImage();
#endif
}
/// <summary>
/// Draws a visual indication of the pattern coordinate frame: the x, y and z
/// axes projected into the image and drawn in red, green and blue.
/// </summary>
/// <param name="img">Image to draw to</param>
/// <param name="ecp">Extrinsic calibration</param>
/// <param name="icp">Intrinsic calibration</param>
public static void DrawCoordinateFrame(
    Emgu.CV.Image<Bgr, Byte> img,
    Emgu.CV.ExtrinsicCameraParameters ecp,
    Emgu.CV.IntrinsicCameraParameters icp)
{
    // Axis length is one tenth of the image width.
    float extension = img.Width / 10;

    // Project the origin plus the tip of each axis into image space.
    MCvPoint3D32f[] axis_tips = new MCvPoint3D32f[] {
        new MCvPoint3D32f(0, 0, 0),
        new MCvPoint3D32f(extension, 0, 0),
        new MCvPoint3D32f(0, extension, 0),
        new MCvPoint3D32f(0, 0, extension),
    };
    PointF[] projected = Emgu.CV.CameraCalibration.ProjectPoints(axis_tips, ecp, icp);

    // x = red, y = green, z = blue.
    Bgr[] axis_colors = new Bgr[] {
        new Bgr(System.Drawing.Color.Red),
        new Bgr(System.Drawing.Color.Green),
        new Bgr(System.Drawing.Color.Blue),
    };
    for (int i = 0; i < 3; ++i)
    {
        img.Draw(new LineSegment2DF(projected[0], projected[i + 1]), axis_colors[i], 2);
    }
}
/// <summary>
/// Draws the detected pattern points: one numbered circle per point, green
/// when the pattern was found, red otherwise.
/// </summary>
/// <param name="img">Colored image to draw to.</param>
/// <param name="image_points">Image center points.</param>
/// <param name="pattern_found">If true green indicators are drawn, red ones otherwise.</param>
public virtual void DrawPattern(Emgu.CV.Image<Bgr, Byte> img, PointF[] image_points, bool pattern_found)
{
    if (image_points == null)
    {
        return;
    }

    Bgr marker_color = new Bgr(pattern_found ? System.Drawing.Color.Green : System.Drawing.Color.Red);
    MCvFont font = new MCvFont(Emgu.CV.CvEnum.FONT.CV_FONT_HERSHEY_PLAIN, 0.8, 0.8);

    for (int i = 0; i < image_points.Length; ++i)
    {
        PointF point = image_points[i];
        img.Draw(new CircleF(point, 4), marker_color, 2);
        // 1-based label, placed slightly up and to the right of the marker.
        Point anchor = point.ToNearestPoint();
        img.Draw((i + 1).ToString(), ref font, new System.Drawing.Point(anchor.X + 5, anchor.Y - 5), marker_color);
    }
}
/// <summary>
/// Binarizes the image, detects ellipses, keeps those whose rating is below
/// the distance threshold, orders them by squared center distance from the
/// image origin and draws each with a running number.
/// </summary>
/// <param name="image">Color image to process and draw onto.</param>
public virtual void ProcessImage(Emgu.CV.Image<Emgu.CV.Structure.Bgr, byte> image)
{
    // Threshold and invert so ellipse blobs become foreground.
    Emgu.CV.Image<Gray, byte> gray = image.Convert<Gray, byte>();
    gray._ThresholdBinary(new Gray(_threshold), new Gray(255.0));
    gray._Not();

    Parsley.Core.EllipseDetector detector = new Parsley.Core.EllipseDetector();
    detector.MinimumContourCount = _min_contour_count;

    // Keep only ellipses with an acceptable fitting error.
    List<Parsley.Core.DetectedEllipse> accepted =
        new List<Parsley.Core.DetectedEllipse>(detector.DetectEllipses(gray))
        .Where(e => e.Rating < _distance_threshold)
        .ToList();

    // Order by squared distance of the ellipse center from the image origin.
    accepted.Sort(
        (a, b) =>
        {
            double dist_a = a.Ellipse.MCvBox2D.center.X * a.Ellipse.MCvBox2D.center.X +
                            a.Ellipse.MCvBox2D.center.Y * a.Ellipse.MCvBox2D.center.Y;
            double dist_b = b.Ellipse.MCvBox2D.center.X * b.Ellipse.MCvBox2D.center.X +
                            b.Ellipse.MCvBox2D.center.Y * b.Ellipse.MCvBox2D.center.Y;
            return (dist_a.CompareTo(dist_b));
        }
    );

    Bgr label_color = new Bgr(0, 255, 0);
    MCvFont font = new MCvFont(Emgu.CV.CvEnum.FONT.CV_FONT_HERSHEY_PLAIN, 0.8, 0.8);
    for (int i = 0; i < accepted.Count; ++i)
    {
        Parsley.Core.DetectedEllipse detected = accepted[i];
        image.Draw(detected.Ellipse, label_color, 2);
        // 1-based label at the ellipse center.
        image.Draw((i + 1).ToString(), ref font,
                   new System.Drawing.Point((int)detected.Ellipse.MCvBox2D.center.X,
                                            (int)detected.Ellipse.MCvBox2D.center.Y),
                   label_color);
    }
}
/// <summary>
/// Visualizes reconstruction error by projecting each intersection point back
/// into the image and marking it with a small green circle.
/// </summary>
/// <param name="img">Image to draw to.</param>
/// <param name="ecp">Extrinsic calibration.</param>
/// <param name="icp">Intrinsic calibration.</param>
/// <param name="deviations">Per-point deviations — currently not visualized;
/// parameter kept for interface compatibility.</param>
/// <param name="isect_points">Intersection points in world coordinates.</param>
public static void VisualizeError(
    Emgu.CV.Image<Bgr, Byte> img,
    Emgu.CV.ExtrinsicCameraParameters ecp,
    Emgu.CV.IntrinsicCameraParameters icp,
    double[] deviations,
    Vector[] isect_points)
{
    // (Removed an unused MCvFont local the original allocated but never used.)
    Bgr bgr = new Bgr(System.Drawing.Color.Green);
    foreach (Vector p in isect_points)
    {
        // Project the single world point into image coordinates.
        System.Drawing.PointF[] coords = Emgu.CV.CameraCalibration.ProjectPoints(
            new MCvPoint3D32f[] { p.ToEmguF() },
            ecp, icp
            );
        img.Draw(new CircleF(coords[0], 1), bgr, 1);
    }
}
/// <summary>
/// Draws the given rectangle indicator onto the image, outlined in green.
/// </summary>
/// <param name="o">Rectangle (boxed as object).</param>
/// <param name="img">Image to draw onto.</param>
public void DrawIndicator(object o, Emgu.CV.Image<Emgu.CV.Structure.Bgr, byte> img)
{
    Rectangle indicator = (Rectangle)o;
    img.Draw(indicator, new Emgu.CV.Structure.Bgr(Color.Green), 1);
}
/// <summary>
/// Detects square markers in the image and determines their orientation.
/// The image is binarized (Otsu), contours are extracted, and each 4-vertex
/// contour of sufficient perimeter is perspective-warped into a canonical ROI
/// and matched against the reference in all four 90-degree orientations; when
/// the best match error is low enough the quad is outlined in green and the
/// edge corresponding to the detected orientation is drawn in red.
/// </summary>
/// <param name="image">Color image to process; detections are drawn onto it.</param>
public void ProcessImage(Emgu.CV.Image<Emgu.CV.Structure.Bgr, byte> image)
{
    // Binarize with a fixed threshold of 40 combined with Otsu, then invert so
    // the marker becomes foreground for contour extraction.
    Emgu.CV.Image<Gray, byte> gray = image.Convert<Gray, byte>();
    Emgu.CV.Image<Gray, byte> binary = new Image<Gray, byte>(image.Size);
    CvInvoke.cvThreshold(gray, binary, 40, 255, THRESH.CV_THRESH_BINARY | THRESH.CV_THRESH_OTSU);
    binary._Not();
    Emgu.CV.Contour<System.Drawing.Point> contour_points = binary.FindContours();
    MemStorage storage = new MemStorage();
    Matrix<double> warp = new Matrix<double>(3, 3);
    // Walk the contour list via HNext (sibling pointer in the contour tree).
    while (contour_points != null)
    {
        // Simplify the contour; tolerance is 5% of its perimeter so a square
        // collapses to exactly 4 vertices.
        Contour<Point> c = contour_points.ApproxPoly(contour_points.Perimeter * 0.05, storage);
        double p = c.Perimeter;
        // Candidate marker: quadrilateral with perimeter above 300 px.
        if (c.Total == 4 && p > 300)
        {
            // Map the quad's corners onto the canonical destination quad _dest
            // and warp the grayscale patch into the reference ROI buffer.
            PointF[] src = new PointF[] {
                new PointF(c[0].X, c[0].Y),
                new PointF(c[1].X, c[1].Y),
                new PointF(c[2].X, c[2].Y),
                new PointF(c[3].X, c[3].Y)
            };
            CvInvoke.cvGetPerspectiveTransform(src, _dest, warp);
            int flags = (int)INTER.CV_INTER_LINEAR + (int)WARP.CV_WARP_FILL_OUTLIERS;
            CvInvoke.cvWarpPerspective(gray, _roi, warp, flags, new MCvScalar(0));
            // Compare the warped patch against the reference in all four
            // orientations; FindBestOrientation is defined elsewhere in this class.
            double min_error;
            Orientation orient;
            FindBestOrientation(out min_error, out orient);
            // Accept the match only below this empirical error threshold.
            if (min_error < 0.4)
            {
                image.DrawPolyline(c.ToArray(), true, new Bgr(Color.Green), 2);
                System.Console.WriteLine(min_error + " " + orient);
                // Highlight the quad edge that corresponds to the detected
                // orientation in red.
                switch (orient)
                {
                case Orientation.Degrees0:
                    image.Draw(new LineSegment2D(c[0], c[3]), new Bgr(System.Drawing.Color.Red), 2);
                    break;

                case Orientation.Degrees90:
                    image.Draw(new LineSegment2D(c[1], c[0]), new Bgr(System.Drawing.Color.Red), 2);
                    break;

                case Orientation.Degrees180:
                    image.Draw(new LineSegment2D(c[2], c[1]), new Bgr(System.Drawing.Color.Red), 2);
                    break;

                case Orientation.Degrees270:
                    image.Draw(new LineSegment2D(c[3], c[2]), new Bgr(System.Drawing.Color.Red), 2);
                    break;
                }
            } // 0 degrees
        }
        contour_points = contour_points.HNext;
    }
}