/// <summary>
///   FAST on the black sample image with non-maximum suppression disabled:
///   checks the total corner count and spot-checks a few detected locations.
/// </summary>
public void ProcessImageTest()
{
    UnmanagedImage image = UnmanagedImage.FromManagedImage(Properties.Resources.sample_black);

    var target = new FastCornersDetector
    {
        Suppress = false,
        Threshold = 20
    };

    List<IntPoint> actual = target.ProcessImage(image);

    Assert.AreEqual(237, actual.Count);

    // Spot-check corner coordinates at a handful of indices.
    int[] indices = { 0, 6, 11, 65, 73 };
    int[] expectedX = { 404, 407, 407, 55, 103 };
    int[] expectedY = { 35, 36, 38, 135, 137 };

    for (int i = 0; i < indices.Length; i++)
    {
        Assert.AreEqual(expectedX[i], actual[indices[i]].X);
        Assert.AreEqual(expectedY[i], actual[indices[i]].Y);
    }
}
/// <summary>
/// Features from Accelerated Segment Test (FAST) corners detector.
/// <para>Accord.NET internal call. Please see: <see cref="Accord.Imaging.FastCornersDetector"/> for details.</para>
/// </summary>
/// <param name="im">Image.</param>
/// <param name="threshold">The suppression threshold. Decreasing this value increases the number of points detected by the algorithm.</param>
/// <returns>Interest point locations.</returns>
public static List<IntPoint> CornerFeaturesDetector(this Image<Gray, byte> im, int threshold = 20)
{
    var detector = new FastCornersDetector(threshold);

    // Wrap the Emgu image as an AForge image without copying when possible;
    // fail loudly if the pixel format cannot be cast.
    var aforgeImage = im.ToAForgeImage(copyAlways: false, failIfCannotCast: true);

    return detector.ProcessImage(aforgeImage);
}
/// <summary>
///   FAST on the Lena image with non-maximum suppression enabled:
///   checks the total corner count and spot-checks a few detected locations.
/// </summary>
public void ProcessImageTest2()
{
    UnmanagedImage image = UnmanagedImage.FromManagedImage(Properties.Resources.lena512);

    var target = new FastCornersDetector
    {
        Suppress = true,
        Threshold = 40
    };

    List<IntPoint> actual = target.ProcessImage(image);

    Assert.AreEqual(324, actual.Count);

    // Spot-check corner coordinates at a handful of indices.
    int[] indices = { 0, 6, 11, 65, 73 };
    int[] expectedX = { 506, 152, 416, 140, 133 };
    int[] expectedY = { 4, 75, 115, 246, 253 };

    for (int i = 0; i < indices.Length; i++)
    {
        Assert.AreEqual(expectedX[i], actual[indices[i]].X);
        Assert.AreEqual(expectedY[i], actual[indices[i]].Y);
    }
}
/// <summary>
/// Features from Accelerated Segment Test (FAST) corners detector.
/// <para>Accord.NET internal call. Please see: <see cref="Accord.Imaging.FastCornersDetector"/> for details.</para>
/// </summary>
/// <param name="im">Image.</param>
/// <param name="threshold">The suppression threshold. Decreasing this value increases the number of points detected by the algorithm.</param>
/// <returns>Interest point locations.</returns>
public static List<IntPoint> CornerFeaturesDetector(this Gray<byte>[,] im, int threshold = 20)
{
    var detector = new FastCornersDetector(threshold);

    // Lock the managed array so it can be viewed as an AForge image;
    // the lock is released as soon as detection completes.
    using (var locked = im.Lock())
    {
        return detector.ProcessImage(locked.AsAForgeImage());
    }
}
/// <summary>
///   Detects FAST corners in the image referenced by <c>this.CurrentImage</c>
///   and raises the <c>ImageComplete</c> event with the detected points.
/// </summary>
/// <param name="threshold">Detection threshold; lower values yield more corners.</param>
/// <param name="supress">Whether to apply non-maximum suppression to the detected corners.</param>
public void GetCorners(int threshold, bool supress)
{
    // create corners detector's instance
    FastCornersDetector fcd = new FastCornersDetector()
    {
        Suppress = supress,
        Threshold = threshold
    };

    List<IntPoint> corners;

    // Fix: the original code never disposed the Bitmap returned by
    // Image.FromFile, leaking GDI+ handles on every call.
    using (var bitmap = AForge.Imaging.Image.FromFile(this.CurrentImage))
    {
        // Apply the filter and collect the points
        corners = fcd.ProcessImage(bitmap);
    }

    // Copy the delegate to a local before invoking so a concurrent
    // unsubscribe between the null check and the call cannot throw.
    var handler = ImageComplete;
    if (handler != null)
    {
        handler(corners);
    }
}
/// <summary>
///   Runs the FAST detector over a batch of flower images and verifies
///   that a non-null point list is produced for every one of them.
/// </summary>
public void batch_test()
{
    // Clone the embedded resources so the detector works on writable copies.
    Bitmap[] images =
    {
        Accord.Imaging.Image.Clone(Resources.flower01),
        Accord.Imaging.Image.Clone(Resources.flower02),
        Accord.Imaging.Image.Clone(Resources.flower03),
        Accord.Imaging.Image.Clone(Resources.flower04),
        Accord.Imaging.Image.Clone(Resources.flower05),
        Accord.Imaging.Image.Clone(Resources.flower06),
    };

    var target = new FastCornersDetector();

    foreach (Bitmap bitmap in images)
    {
        List<IntPoint> actual = target.ProcessImage(bitmap);
        Assert.IsNotNull(actual);
    }
}
/// <summary>
///   Documentation example for <c>FastCornersDetector</c>. The body of the
///   <c>doc_apply</c> region below is extracted verbatim into the published
///   API docs, so its code must not be restyled.
/// </summary>
public void doc_test()
{
    // Test images are resolved relative to the test run directory.
    string localPath = TestContext.CurrentContext.TestDirectory;

    #region doc_apply
    // Let's load an example image, such as Lena,
    // from a standard dataset of example images:
    var images = new TestImages(path: localPath);
    Bitmap lena = images["lena.bmp"];

    // Create FAST with the default parameter values:
    var fast = new FastCornersDetector(threshold: 20);

    // Use it to extract interest points from the Lena image:
    List <IntPoint> descriptors = fast.ProcessImage(lena);

    // Now those descriptors can be used to represent the image itself, such
    // as for example, in the Bag-of-Visual-Words approach for classification.
    #endregion

    // Pin the example's output so doc drift is caught by the test suite.
    Assert.AreEqual(1144, descriptors.Count);
}
/// <summary>
///   FAST on a cloned Lena image with non-maximum suppression enabled:
///   checks the total corner count and spot-checks a few detected locations.
/// </summary>
public void ProcessImageTest2()
{
    UnmanagedImage image = UnmanagedImage.FromManagedImage(
        Accord.Imaging.Image.Clone(Resources.lena512));

    var target = new FastCornersDetector
    {
        Suppress = true,
        Threshold = 40
    };

    List<IntPoint> actual = target.ProcessImage(image);

    Assert.AreEqual(324, actual.Count);

    // Spot-check corner coordinates at a handful of indices.
    int[] indices = { 0, 6, 11, 65, 73 };
    int[] expectedX = { 506, 152, 416, 140, 133 };
    int[] expectedY = { 4, 75, 115, 246, 253 };

    for (int i = 0; i < indices.Length; i++)
    {
        Assert.AreEqual(expectedX[i], actual[indices[i]].X);
        Assert.AreEqual(expectedY[i], actual[indices[i]].Y);
    }
}
/// <summary>
///   FAST on a cloned black sample image with non-maximum suppression
///   disabled: checks the total corner count and spot-checks locations.
/// </summary>
public void ProcessImageTest()
{
    UnmanagedImage image = UnmanagedImage.FromManagedImage(
        Accord.Imaging.Image.Clone(Resources.sample_black));

    var target = new FastCornersDetector
    {
        Suppress = false,
        Threshold = 20
    };

    List<IntPoint> actual = target.ProcessImage(image);

    Assert.AreEqual(237, actual.Count);

    // Spot-check corner coordinates at a handful of indices.
    int[] indices = { 0, 6, 11, 65, 73 };
    int[] expectedX = { 404, 407, 407, 55, 103 };
    int[] expectedY = { 35, 36, 38, 135, 137 };

    for (int i = 0; i < indices.Length; i++)
    {
        Assert.AreEqual(expectedX[i], actual[indices[i]].X);
        Assert.AreEqual(expectedY[i], actual[indices[i]].Y);
    }
}
/// <summary>
/// Takes the video and processes it two frames at a time to calculate
/// per-frame mean optical-flow positions (FAST corners tracked with
/// pyramidal Lucas-Kanade) and serializes them to disk.
/// </summary>
/// <param name="vid">Path of the video on the disk.</param>
/// <param name="save_path">Path to save the features on the disk.</param>
/// <returns></returns>
public void Extract_Featurers2(String vid, String save_path)
{
    int mm = 0;  // index of the frame currently being processed
    try
    {
        // Initialize the instance fields used by the pipeline.
        // NOTE(review): mag/ang are initialized here but never used in
        // this method — presumably shared with another extraction routine.
        mag = new Mat();
        ang = new Mat();
        frame = new Mat();
        prev_frame = new Mat();
        cap = new VideoCapture(vid);
        total_frames = Convert.ToInt32(cap.GetCaptureProperty(CapProp.FrameCount));
        F_L = new List <int>();  // flat list of (mean_X, mean_Y) pairs, one pair per frame
        // Prime the pair: both references start at the first frame, so the
        // first loop iteration compares frame 0 against frame 1.
        frame = cap.QueryFrame();
        prev_frame = frame;
        Console.WriteLine(total_frames);
    }
    catch (NullReferenceException except)
    {
        // If the capture failed to open, total_frames stays at its previous
        // value; the loop below is then skipped (or may misbehave) — the
        // error is only logged, not rethrown.
        Console.WriteLine(except.Message);
    }
    //17900
    while (mm < total_frames - 2)
    {
        try
        {
            // Advance the frame pair. NOTE(review): this is a reference
            // assignment — correctness depends on QueryFrame returning a
            // fresh Mat rather than reusing the same buffer; confirm with
            // the Emgu CV version in use.
            prev_frame = frame;
            frame = cap.QueryFrame();

            // Copy the current frame into a GDI+ bitmap for Accord.
            // NOTE(review): neither `image` nor `markers` below is disposed;
            // this leaks GDI+ handles on every iteration.
            Bitmap image = new Bitmap(frame.Bitmap);

            // Create a new FAST Corners Detector
            FastCornersDetector fast = new FastCornersDetector()
            {
                Suppress  = true, // suppress non-maximum points
                Threshold = 70    // less leads to more corners
            };

            // Process the image looking for corners
            List <IntPoint> points = fast.ProcessImage(image);

            // Create a filter to mark the corners
            PointsMarker marker = new PointsMarker(points);

            // Apply the corner-marking filter.
            // NOTE(review): `markers` is never used after this (the
            // ImageBox display is commented out) — dead work per frame.
            Bitmap markers = marker.Apply(image);

            // Show on the screen
            //Accord.Controls.ImageBox.Show(markers);

            // Use it to extract interest points from the Lena image:
            // NOTE(review): this re-runs the detector on the same image;
            // `descriptors` duplicates `points` — redundant second pass.
            List <IntPoint> descriptors = fast.ProcessImage(image);

            // Convert corner points to the PointF[] form LK tracking needs.
            PointF[] features = new PointF[descriptors.Count];
            int c = 0;
            foreach (IntPoint p in descriptors)
            {
                features[c] = new PointF(p.X, p.Y);
                c++;
            }

            // NOTE(review): viewer is constructed but never shown or disposed.
            ImageViewer viewer = new ImageViewer();

            // Grayscale copies of the frame pair for optical flow.
            // NOTE(review): the two constructor-allocated images are
            // immediately overwritten by ToImage — the allocations are dead.
            Image <Gray, Byte> prev_grey_img = new Image <Gray, byte>(frame.Width, frame.Height);
            Image <Gray, Byte> curr_grey_img = new Image <Gray, byte>(frame.Width, frame.Height);
            curr_grey_img = frame.ToImage <Gray, byte>();
            prev_grey_img = prev_frame.ToImage <Gray, Byte>();

            // Track the FAST corners from the previous frame into the
            // current one with pyramidal Lucas-Kanade (9x9 window, 3 levels,
            // stop after 20 iterations or epsilon 0.05).
            PointF[] shiftedFeatures;
            Byte[] status;
            float[] trackErrors;
            CvInvoke.CalcOpticalFlowPyrLK(prev_grey_img, curr_grey_img, features,
                new Size(9, 9), 3, new MCvTermCriteria(20, 0.05),
                out shiftedFeatures, out status, out trackErrors);

            //Image<Gray, Byte> displayImage = cap.QueryFrame().ToImage<Gray, Byte>();
            //for (int i = 0; i < features.Length; i++)
            //    displayImage.Draw(new LineSegment2DF(features[i], shiftedFeatures[i]), new Gray(), 2);

            // Draw the tracked positions onto the frame (visual debugging).
            for (int i = 0; i < features.Length; i++)
            {
                CvInvoke.Circle(frame, System.Drawing.Point.Round(shiftedFeatures[i]), 4,
                    new MCvScalar(0, 255, 255), 2);
            }

            // Reduce all tracked points to their mean position; this pair of
            // integers is the feature recorded for this frame.
            // NOTE(review): if no corners were detected, shiftedFeatures is
            // empty and this divides by zero — the DivideByZeroException is
            // swallowed by the catch below, silently skipping the frame.
            int mean_X = 0;
            int mean_Y = 0;
            foreach (PointF p in shiftedFeatures)
            {
                mean_X += (int)p.X;
                mean_Y += (int)p.Y;
            }
            mean_X /= shiftedFeatures.Length;
            mean_Y /= shiftedFeatures.Length;
            F_L.Add(mean_X);
            F_L.Add(mean_Y);

            //double[] inner = new double[] { mean_X, mean_Y };
            //featuers_list[mm] = inner;
            //viewer.Image = frame;
            //viewer.ShowDialog();
            //prev_frame = frame;
            //Console.WriteLine("frame:{0} " + mm);

            // NOTE(review): the "{0}"/"{1}"/"{2}" placeholders are never
            // substituted — the values are string-concatenated instead, so
            // the literal braces appear in the output.
            Console.WriteLine("frame:{0} " + mm + " X:{1} " + mean_X + " Y:{2} " + mean_Y);
            mm++;
        }
        catch (Exception e)
        {
            // Broad catch keeps the loop alive on any per-frame failure.
            // NOTE(review): mm is not incremented on failure, so a frame
            // that always throws would loop forever; confirm intended.
            Console.WriteLine(e.Message);
        }
    }

    //int go = 0;
    //foreach (double[] arr in featuers_list)
    //{
    //    Console.Write("frame:{0} ", go++);
    //    foreach (double d in arr)
    //        Console.Write(d + " ");
    //    Console.WriteLine();
    //}

    // Persist the accumulated (mean_X, mean_Y) sequence to disk.
    Serialize.SerializeObject(F_L, save_path);
}