/// <summary>
/// Renders this instance's <c>Points</c> onto a fresh n-by-n 24bpp RGB bitmap
/// in the given color and returns the marked bitmap.
/// </summary>
/// <param name="color">Color used to draw each point.</param>
/// <returns>A new bitmap with the points drawn on it.</returns>
public Bitmap DrawPoints(Color color)
{
    var canvas = new Bitmap(n, n, PixelFormat.Format24bppRgb);
    var marker = new PointsMarker(Points, color);
    return marker.Apply(canvas);
}
/// <summary>
/// Builds a closed triangular contour with a V-shaped notch cut into it,
/// then verifies that <c>ConvexHullDefects</c> reports exactly one defect
/// of depth 99 against the Graham-scan convex hull.
/// </summary>
public void FindTest()
{
    const int size = 100;
    var contour = new List<IntPoint>();

    // Edge along y == size, left to right.
    for (var x = 0; x < size; x++)
        add(contour, x, size);

    // Edge along x == size.
    for (var y = 0; y < size; y++)
        add(contour, size, y);

    // Edge along x == 0.
    for (var y = 0; y < size; y++)
        add(contour, 0, y);

    // First half of the notch: diagonal from the origin.
    for (var i = 0; i < size / 2; i++)
        add(contour, i, i);

    // Second half of the notch: diagonal back down.
    for (var i = 0; i < size / 2; i++)
        add(contour, i + size / 2, size / 2 - i);

    // Rasterize the contour (kept for parity with the original test,
    // which rendered it for visual inspection).
    var marker = new PointsMarker(contour);
    var bitmap = AForge.Imaging.Image.CreateGrayscaleImage(size + 1, size + 1);
    bitmap = marker.Apply(bitmap);

    var graham = new GrahamConvexHull();
    var hull = graham.FindHull(contour);

    var hullDefects = new ConvexHullDefects(10);
    var defects = hullDefects.FindDefects(contour, hull);

    Assert.AreEqual(1, defects.Count);
    Assert.AreEqual(99, defects[0].Depth);
}
/// <summary>
/// Runs <c>BinaryWatershed</c> over the sample "water" image and checks the
/// output dimensions, pixel format, and the exact watershed maxima found;
/// also saves the input, result, and a marked result to the test directory.
/// </summary>
public void ApplyTest1()
{
    var basePath = Path.Combine(NUnit.Framework.TestContext.CurrentContext.TestDirectory, "watershed");
    Directory.CreateDirectory(basePath);

    var shapes = Accord.Imaging.Image.Clone(Resources.water);
    shapes.Save(Path.Combine(basePath, "shapes.jpg"));

    var bw = new BinaryWatershed();
    var result = bw.Apply(shapes);

    Assert.AreEqual(746, result.Width);
    Assert.AreEqual(643, result.Height);
    Assert.AreEqual(PixelFormat.Format8bppIndexed, result.PixelFormat);

    Assert.AreEqual(9, bw.MaxPoints.Count);

    // Kept for parity with the original: handy strings when updating the
    // expected coordinates below.
    var strX = bw.MaxPoints.Select(i => i.X).ToArray().ToCSharp();
    var strY = bw.MaxPoints.Select(i => i.Y).ToArray().ToCSharp();

    var x = new double[] { 310, 546, 136, 254, 429, 612, 398, 345, 498 };
    var y = new double[] { 436, 153, 392, 201, 336, 339, 242, 183, 319 };
    Assert.AreEqual(x, bw.MaxPoints.Select(i => i.X).ToArray());
    Assert.AreEqual(y, bw.MaxPoints.Select(i => i.Y).ToArray());

    result.Save(Path.Combine(basePath, "watershed.jpg"));

    // Mark the detected maxima in red on an RGB copy of the result.
    var toRGB = new GrayscaleToRGB();
    result = toRGB.Apply(result);

    var marker = new PointsMarker(Color.Red, 5) { Points = bw.MaxPoints };
    var marked = marker.Apply(result);
    marked.Save(Path.Combine(basePath, "watershed-marks.jpg"));

    Assert.IsNotNull(result);
    Assert.IsNotNull(marked);
}
/// <summary>
/// Draws the given points onto the bitmap in the given color and returns
/// the marked bitmap produced by <see cref="PointsMarker"/>.
/// </summary>
/// <param name="bmp">Source bitmap to mark.</param>
/// <param name="points">Points to draw.</param>
/// <param name="color">Color used for each point.</param>
/// <returns>The bitmap produced by applying the marker filter.</returns>
public static Bitmap MarkPoints(Bitmap bmp, List <IntPoint> points, Color color)
    => new PointsMarker(points, color).Apply(bmp);
/// <summary>
/// Takes the video and processes it two frames at a time to calculate
/// optical-flow features (mean tracked corner position per frame pair)
/// and saves them on the disk.
/// </summary>
/// <param name="vid">Path of the video on the disk.</param>
/// <param name="save_path">Path to save the features on the disk.</param>
public void Extract_Featurers2(String vid, String save_path)
{
    int mm = 0;

    try
    {
        mag = new Mat();
        ang = new Mat();
        frame = new Mat();
        prev_frame = new Mat();
        cap = new VideoCapture(vid);
        total_frames = Convert.ToInt32(cap.GetCaptureProperty(CapProp.FrameCount));
        F_L = new List <int>();
        frame = cap.QueryFrame();
        prev_frame = frame;
        Console.WriteLine(total_frames);
    }
    catch (NullReferenceException except)
    {
        // Best-effort open: report and fall through (total_frames stays 0,
        // so the loop below simply does not run).
        Console.WriteLine(except.Message);
    }

    while (mm < total_frames - 2)
    {
        try
        {
            prev_frame = frame;
            frame = cap.QueryFrame();

            // The container's reported frame count can overshoot the number
            // of decodable frames; stop instead of dereferencing a null Mat
            // (previously this threw NullReferenceException every iteration
            // and, with mm never advancing, spun forever).
            if (frame == null)
                break;

            using (Bitmap image = new Bitmap(frame.Bitmap))
            {
                // FAST corner detector: suppress non-maximum points; a lower
                // threshold yields more corners.
                FastCornersDetector fast = new FastCornersDetector()
                {
                    Suppress = true,
                    Threshold = 70
                };

                // Detect interest points once. (The original called
                // ProcessImage twice and built an unused marker bitmap.)
                List <IntPoint> descriptors = fast.ProcessImage(image);

                // No corners means nothing to track for this frame pair; the
                // original divided by shiftedFeatures.Length and threw
                // DivideByZeroException here.
                if (descriptors.Count == 0)
                    continue;

                PointF[] features = new PointF[descriptors.Count];
                int c = 0;
                foreach (IntPoint p in descriptors)
                    features[c++] = new PointF(p.X, p.Y);

                // Dispose the grayscale temporaries (the original allocated
                // two Image<Gray,Byte> buffers, immediately reassigned them,
                // and never disposed anything).
                using (Image <Gray, Byte> prev_grey_img = prev_frame.ToImage <Gray, Byte>())
                using (Image <Gray, Byte> curr_grey_img = frame.ToImage <Gray, byte>())
                {
                    PointF[] shiftedFeatures;
                    Byte[] status;
                    float[] trackErrors;

                    // Pyramidal Lucas-Kanade: track the detected corners from
                    // the previous frame into the current one.
                    CvInvoke.CalcOpticalFlowPyrLK(prev_grey_img, curr_grey_img, features,
                        new Size(9, 9), 3, new MCvTermCriteria(20, 0.05),
                        out shiftedFeatures, out status, out trackErrors);

                    // Draw the tracked positions on the current frame.
                    for (int i = 0; i < features.Length; i++)
                    {
                        CvInvoke.Circle(frame, System.Drawing.Point.Round(shiftedFeatures[i]),
                            4, new MCvScalar(0, 255, 255), 2);
                    }

                    // Mean tracked position, in integer arithmetic to match
                    // the original feature encoding.
                    int mean_X = 0;
                    int mean_Y = 0;
                    foreach (PointF p in shiftedFeatures)
                    {
                        mean_X += (int)p.X;
                        mean_Y += (int)p.Y;
                    }

                    mean_X /= shiftedFeatures.Length;
                    mean_Y /= shiftedFeatures.Length;

                    F_L.Add(mean_X);
                    F_L.Add(mean_Y);

                    Console.WriteLine("frame:{0} " + mm + " X:{1} " + mean_X + " Y:{2} " + mean_Y);
                }
            }
        }
        catch (Exception e)
        {
            // Best-effort per-frame processing: log and move on.
            Console.WriteLine(e.Message);
        }
        finally
        {
            // Always advance, even on failure or skip, so the loop is
            // guaranteed to terminate (the original only incremented on
            // full success).
            mm++;
        }
    }

    Serialize.SerializeObject(F_L, save_path);
}