/// <summary>
/// Called when videoPlayer receives a new frame.
/// </summary>
/// <param name="sender"></param>
/// <param name="frame"></param>
private void videoPlayer_NewFrame_1(object sender, ref Bitmap frame)
{
    //if (cmb_fuente.SelectedIndex == 1)
    //{
    //    RotateBicubic filter = new RotateBicubic(90, true);
    //    frame = filter.Apply(frame);
    //}

    if (previousFrame != null)
    {
        // find the thresholded euclidean difference between two subsequent frames
        ThresholdedEuclideanDifference threshold = new ThresholdedEuclideanDifference(thr);
        threshold.OverlayImage = previousFrame;
        var difference = threshold.Apply(frame);

        // only keep big blobs
        var filter = new BlobsFiltering();
        filter.CoupledSizeFiltering = true;
        filter.MinHeight = int.Parse(txt_tamanio2.Text);
        filter.MinWidth = int.Parse(txt_tamanio2.Text);
        filter.ApplyInPlace(difference);

        // erode the difference image to remove small noise
        var erode = new Erosion3x3();
        for (int i = 0; i < int.Parse(txt_ers.Text); i++)
        {
            erode.ApplyInPlace(difference);
        }

        // dilate the remaining blobs
        var dilate = new BinaryDilation3x3();
        for (int i = 0; i < int.Parse(txt_dils.Text); i++)
        {
            dilate.ApplyInPlace(difference);
        }

        // put this image in the thresholded picturebox
        thresholdedBox.Image = difference.Clone() as Bitmap;

        // use this as a mask for the current frame
        var mask = new ApplyMask(difference);
        var maskedFrame = mask.Apply(frame);

        // put this image in the masked picturebox
        maskedBox.Image = maskedFrame.Clone() as Bitmap;

        // now find all moving blobs
        if (frameIndex % 10 == 0)
        {
            var counter = new BlobCounter();
            counter.ProcessImage(difference);

            // only keep blobs that:
            // - do not overlap with known cars
            // - do not overlap with other blobs
            // - have crossed the middle of the frame
            // - are at least as tall as the configured minimum
            var blobs = counter.GetObjectsRectangles();
            var newBlobs = from r in blobs
                           where !trackers.Any(t => t.Tracker.TrackingObject.Rectangle.IntersectsWith(r))
                              && !blobs.Any(b => b.IntersectsWith(r) && b != r)
                              && r.Top >= 240 && r.Bottom <= 480
                              && r.Height >= int.Parse(txt_tamanio.Text)
                           select r;

            // set up a new camshift tracker for each detected blob
            foreach (var rect in newBlobs)
            {
                trackers.Add(new TrackerType(rect, frameIndex, ++carIndex));
            }
        }

        // now kill all car trackers that have expanded by too much
        trackers.RemoveAll(t => t.Tracker.TrackingObject.Rectangle.Height > 360);

        // and kill all trackers that have lived longer than the configured number of frames
        trackers.RemoveAll(t => frameIndex - t.StartIndex > frames);

        // let all remaining trackers process the current frame
        var img = UnmanagedImage.FromManagedImage(maskedFrame);
        trackers.ForEach(t => t.Tracker.ProcessFrame(img));

        // remember this frame for the next iteration
        previousFrame.Dispose();
        previousFrame = frame.Clone() as Bitmap;

        // draw a label for each tracked vehicle
        var outputFrame = frame.Clone() as Bitmap;
        trackers
            .FindAll(t => !t.Tracker.TrackingObject.IsEmpty)
            .ForEach(t => DrawCarLabel(outputFrame, t.Tracker.TrackingObject.Rectangle, t.CarNumber));

        // return the processed frame to the video
        frame = outputFrame;
    }
    else
    {
        // just remember this frame for the next iteration
        previousFrame = frame.Clone() as Bitmap;
    }

    frameIndex++;
}
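// The difference/threshold/morphology pipeline above is repeated almost verbatim in
// Cap_ImageGrabbed below. A minimal sketch of pulling it into one shared helper on the
// same form (the method name and parameter list are assumptions, not part of the original code):
private Bitmap BuildMotionMask(Bitmap current, Bitmap previous, int thresholdValue,
                               int minBlobSize, int erosions, int dilations)
{
    // thresholded euclidean difference between the two frames (8 bpp result)
    var threshold = new ThresholdedEuclideanDifference(thresholdValue) { OverlayImage = previous };
    var difference = threshold.Apply(current);

    // keep only blobs above the configured minimum size
    var blobFilter = new BlobsFiltering
    {
        CoupledSizeFiltering = true,
        MinWidth = minBlobSize,
        MinHeight = minBlobSize
    };
    blobFilter.ApplyInPlace(difference);

    // clean up with the configured number of erosion and dilation passes
    var erode = new Erosion3x3();
    for (int i = 0; i < erosions; i++) erode.ApplyInPlace(difference);

    var dilate = new BinaryDilation3x3();
    for (int i = 0; i < dilations; i++) dilate.ApplyInPlace(difference);

    return difference;
}

// possible usage from either handler:
// var difference = BuildMotionMask(frame, previousFrame, thr,
//     int.Parse(txt_tamanio2.Text), int.Parse(txt_ers.Text), int.Parse(txt_dils.Text));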
static void Main(string[] args)
{
    Threshold thresh = new Threshold(10);
    Median median = new Median(9);
    Erosion3x3 erode = new Erosion3x3();
    Dilatation3x3 dilate = new Dilatation3x3();
    GrahamConvexHull hullFinder = new GrahamConvexHull();
    ConnectedComponentsLabeling ccLabeler = new ConnectedComponentsLabeling();
    BorderFollowing contourFinder = new BorderFollowing();
    GrayscaleToRGB rgb = new GrayscaleToRGB();
    ConvexHullDefects defectFinder = new ConvexHullDefects(10);

    Bitmap img = (Bitmap)Bitmap.FromFile("hand3.jpg");

    // convert to grayscale, threshold, and clean up with one erosion + dilation pass
    Bitmap image = Grayscale.CommonAlgorithms.BT709.Apply(img);
    thresh.ApplyInPlace(image);
    //median.ApplyInPlace(image);
    erode.ApplyInPlace(image);
    dilate.ApplyInPlace(image);

    // find the largest blob, assumed to be the hand
    BlobCounter counter = new BlobCounter(image);
    counter.ObjectsOrder = ObjectsOrder.Area;
    Blob[] blobs = counter.GetObjectsInformation();

    if (blobs.Length > 0)
    {
        counter.ExtractBlobsImage(image, blobs[0], true);
        UnmanagedImage hand = blobs[0].Image;

        // trace the hand contour and compute its convex hull and convexity defects
        var contour = contourFinder.FindContour(hand);
        if (contour.Count() > 0)
        {
            var initialHull = hullFinder.FindHull(contour);
            var defects = defectFinder.FindDefects(contour, initialHull);
            var filteredHull = initialHull.ClusterHullPoints().FilterLinearHullPoints();
            var palmCenter = defects.Centroid(contour);
            var wristPoints = filteredHull.SelectWristPoints(defects, contour);

            // draw the hull, contour, palm center, defect points, hull points and wrist points
            Bitmap color = rgb.Apply(hand).ToManagedImage();
            Graphics gr = Graphics.FromImage(color);
            gr.DrawPolygon(new Pen(Brushes.Red, 3), filteredHull.ToPtArray());
            gr.DrawLines(new Pen(Brushes.Blue, 3), contour.ToPtArray());
            gr.DrawEllipse(new Pen(Brushes.Red, 3), palmCenter.X - 10, palmCenter.Y - 10, 20, 20);

            foreach (ConvexityDefect defect in defects)
            {
                gr.DrawEllipse(new Pen(Brushes.Green, 6), contour[defect.Point].X - 10, contour[defect.Point].Y - 10, 20, 20);
            }

            foreach (AForge.IntPoint pt in filteredHull)
            {
                gr.DrawEllipse(new Pen(Brushes.Yellow, 6), pt.X - 10, pt.Y - 10, 20, 20);
            }

            foreach (AForge.IntPoint pt in wristPoints)
            {
                gr.DrawEllipse(new Pen(Brushes.PowderBlue, 6), pt.X - 10, pt.Y - 10, 20, 20);
            }

            ImageBox.Show(color);
        }
    }
}
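// The Graphics object and the Pens created in the drawing section above are never
// disposed. A minimal sketch of the same hull/contour drawing wrapped in using-blocks
// (the helper name and parameters are hypothetical; it converts AForge points itself
// instead of relying on the ToPtArray extension from the listing above):
static void DrawHullAndContour(Bitmap target, IEnumerable<AForge.IntPoint> hull, IEnumerable<AForge.IntPoint> contour)
{
    // convert AForge points to System.Drawing points for GDI+
    Point[] hullPts = hull.Select(p => new Point(p.X, p.Y)).ToArray();
    Point[] contourPts = contour.Select(p => new Point(p.X, p.Y)).ToArray();

    using (Graphics g = Graphics.FromImage(target))
    using (var redPen = new Pen(Color.Red, 3))
    using (var bluePen = new Pen(Color.Blue, 3))
    {
        g.DrawPolygon(redPen, hullPts);
        g.DrawLines(bluePen, contourPts);
    }
}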
private void Cap_ImageGrabbed(object sender, EventArgs e)
{
    try
    {
        Mat imagen = new Mat();
        cap.Retrieve(imagen);
        //pb_lprptzanalitica.Image = imagen.Bitmap;
        Bitmap frame = new Bitmap(imagen.Bitmap);
        //pb_ipcam.Image = frame.Clone() as Bitmap;

        if (previousFrame != null)
        {
            // find the thresholded euclidean difference between two subsequent frames
            ThresholdedEuclideanDifference threshold = new ThresholdedEuclideanDifference(thr);
            threshold.OverlayImage = previousFrame;
            var difference = threshold.Apply(frame.Clone() as Bitmap);

            // only keep big blobs
            var filter = new BlobsFiltering();
            filter.CoupledSizeFiltering = true;
            filter.MinHeight = int.Parse(txt_tamanio2.Text);
            filter.MinWidth = int.Parse(txt_tamanio2.Text);
            filter.ApplyInPlace(difference);

            // erode the difference image to remove small noise
            var erode = new Erosion3x3();
            for (int i = 0; i < int.Parse(txt_ers.Text); i++)
            {
                erode.ApplyInPlace(difference);
            }

            // dilate the remaining blobs
            var dilate = new BinaryDilation3x3();
            for (int i = 0; i < int.Parse(txt_dils.Text); i++)
            {
                dilate.ApplyInPlace(difference);
            }

            // put this image in the thresholded picturebox
            thresholdedBox.Image = difference.Clone() as Bitmap;

            // use this as a mask for the current frame
            var mask = new ApplyMask(difference);
            var maskedFrame = mask.Apply(frame);

            // put this image in the masked picturebox
            maskedBox.Image = maskedFrame.Clone() as Bitmap;

            // now find all moving blobs
            if (frameIndex % 10 == 0)
            {
                var counter = new BlobCounter();
                counter.ProcessImage(difference);

                // only keep blobs that:
                // - do not overlap with known cars
                // - do not overlap with other blobs
                // - have crossed the middle of the frame
                // - are at least as tall as the configured minimum
                var blobs = counter.GetObjectsRectangles();
                var newBlobs = from r in blobs
                               where !trackers.Any(t => t.Tracker.TrackingObject.Rectangle.IntersectsWith(r))
                                  && !blobs.Any(b => b.IntersectsWith(r) && b != r)
                                  && r.Top >= 240 && r.Bottom <= 480
                                  && r.Height >= int.Parse(txt_tamanio.Text)
                               select r;

                // set up a new camshift tracker for each detected blob
                foreach (var rect in newBlobs)
                {
                    trackers.Add(new TrackerType(rect, frameIndex, ++carIndex));
                }
            }

            // now kill all car trackers that have expanded by too much
            trackers.RemoveAll(t => t.Tracker.TrackingObject.Rectangle.Height > 360);

            // and kill all trackers that have lived longer than the configured number of frames
            trackers.RemoveAll(t => frameIndex - t.StartIndex > frames);

            // let all remaining trackers process the current frame
            var img = UnmanagedImage.FromManagedImage(maskedFrame);
            trackers.ForEach(t => t.Tracker.ProcessFrame(img));

            // remember this frame for the next iteration
            previousFrame.Dispose();
            previousFrame = frame.Clone() as Bitmap;

            // write a label for each tracked vehicle
            var outputFrame = frame.Clone() as Bitmap;
            trackers
                .FindAll(t => !t.Tracker.TrackingObject.IsEmpty)
                .ForEach(t => DrawCarLabel(outputFrame, t.Tracker.TrackingObject.Rectangle, t.CarNumber));

            // show the processed frame
            //pb_ipcam.Image = outputFrame;
            pb_lprptzanalitica.Image = outputFrame;
        }
        else
        {
            // just remember this frame for the next iteration
            previousFrame = frame.Clone() as Bitmap;
        }

        frameIndex++;
    }
    catch (Exception)
    {
        MessageBox.Show("Error decoding video from PTZ, LPR or ANALITICA");
    }
}
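// int.Parse on txt_tamanio, txt_tamanio2, txt_ers and txt_dils runs on every frame and
// throws as soon as a textbox is empty or non-numeric, which is then swallowed by the
// catch block above. A minimal sketch of a tolerant parse helper (the method name and
// the fallback values are assumptions, not part of the original form):
private static int ParseOrDefault(string text, int fallback)
{
    return int.TryParse(text, out int value) ? value : fallback;
}

// possible usage:
// filter.MinHeight = ParseOrDefault(txt_tamanio2.Text, 50);
// filter.MinWidth  = ParseOrDefault(txt_tamanio2.Text, 50);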
public void Video_NewFrame(object sender, NewFrameEventArgs eventArgs)
{
    UnmanagedImage image = UnmanagedImage.FromManagedImage((Bitmap)eventArgs.Frame.Clone());

    // extract the red channel and threshold it to isolate bright red objects
    var extractChannel = new ExtractChannel(RGB.R);
    UnmanagedImage channel = extractChannel.Apply(image);

    var threshold = new Threshold(200);
    threshold.ApplyInPlace(channel);

    // erosion filter to remove small unwanted pixels
    Erosion3x3 erosion = new Erosion3x3();
    erosion.ApplyInPlace(channel);

    //dilation filter
    //Dilatation3x3 dilatation = new Dilatation3x3();
    //dilatation.ApplyInPlace(channel);

    // find blobs in the thresholded channel, ordered by area and filtered by size
    BlobCounter bc = new BlobCounter();
    bc.ObjectsOrder = ObjectsOrder.Area;
    bc.FilterBlobs = true;
    bc.MinHeight = minObjectSize;
    bc.MinWidth = minObjectSize;
    bc.MaxHeight = maxObjectSize;
    bc.MaxWidth = maxObjectSize;
    bc.ProcessImage(channel);
    channel.Dispose();

    // draw a rectangle around each blob; mark the largest one and its center of mass
    Rectangle[] rectangles = bc.GetObjectsRectangles();
    Blob[] blobs = bc.GetObjectsInformation();
    for (int i = 0; i < bc.ObjectsCount; i++)
    {
        Rectangle rectangle = rectangles[i];
        Drawing.Rectangle(image, rectangle, colorList[i % colorList.Length]);

        if (i == 0)
        {
            Position = GetCenterOfMass(image, rectangle);
            Drawing.FillRectangle(image, rectangle, Color.BlanchedAlmond);
            Drawing.FillRectangle(image,
                new Rectangle((int)Position.U - dotSize, (int)Position.V - dotSize, dotSize * 3, dotSize * 3),
                Color.Indigo);
        }
    }

    Image = image.ToManagedImage();
    //videoForm.ImageDestination.Image = image.ToManagedImage();
}
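// GetCenterOfMass is a custom helper that is not shown in this listing. If only the blob
// centroid is needed, AForge's BlobCounter already exposes it via Blob.CenterOfGravity;
// a minimal sketch (the helper name is hypothetical, and treating the blob centroid as a
// stand-in for the weighted center of mass is an assumption):
private static AForge.Point? LargestBlobCenter(BlobCounter counter)
{
    // with ObjectsOrder.Area set (as above), the first blob is the largest
    Blob[] blobs = counter.GetObjectsInformation();
    return blobs.Length > 0 ? blobs[0].CenterOfGravity : (AForge.Point?)null;
}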