public void ExampleTest()
{
    Bitmap lena = Resources.lena512;

    // FREAK works with any corner detection algorithm; FAST is the
    // default, but we build it explicitly here to show how a custom
    // detector would be plugged in.
    var fast = new FastCornersDetector(60);

    // Hand the corner detector to the FREAK extractor. Leaving this
    // argument out would fall back to FAST anyway.
    var extractor = new FastRetinaKeypointDetector(fast);

    // Extract the feature points from the image.
    List<FastRetinaKeypoint> keypoints = extractor.ProcessImage(lena);

    // 83 keypoints are expected. They can be inspected visually with:
    //   FeaturesMarker marker = new FeaturesMarker(keypoints, scale: 20);
    //   ImageBox.Show(marker.Apply(lena));

    // Every keypoint carries a binary descriptor (64 bytes by default).
    byte[] descriptor = keypoints[42].Descriptor;

    // Descriptors can also be rendered in readable text forms,
    // such as hexadecimal or base64.
    string hexText = keypoints[42].ToHex();
    string base64 = keypoints[42].ToBase64();

    Assert.AreEqual(83, keypoints.Count);
    Assert.AreEqual(64, descriptor.Length);
    Assert.AreEqual("3W8M/ev///ffbr/+v3f34vz//7X+f0609v//+++/1+jfq/e83/X5/+6ft3//b4uaPZf7ePb3n/P93/rIbZlf+g==", base64);
}
private void btnSurf_Click(object sender, EventArgs e)
{
    // Step 1: locate interest points in both images with the
    // SURF corner detector.
    var detector = new SpeededUpRobustFeaturesDetector();
    surfPoints1 = detector.ProcessImage(img1).ToArray();
    surfPoints2 = detector.ProcessImage(img2).ToArray();

    // Overlay markers on copies of the original images.
    Bitmap marked1 = new FeaturesMarker(surfPoints1).Apply(img1);
    Bitmap marked2 = new FeaturesMarker(surfPoints2).Apply(img2);

    // Stitch the two marked images side by side, purely for display.
    var joiner = new Concatenate(marked1);
    pictureBox.Image = joiner.Apply(marked2);
}
private void button1_Click(object sender, EventArgs e)
{
    // Load the sample image.
    Bitmap image = Properties.Resources.lena512;

    // Read the detector parameters from the numeric UI controls.
    float hessianThreshold = (float)numThreshold.Value;
    int octaveCount = (int)numOctaves.Value;
    int initialStep = (int)numInitial.Value;

    // Detect SURF interest points using the chosen parameters.
    var detector = new SpeededUpRobustFeaturesDetector(hessianThreshold, octaveCount, initialStep);
    List<SpeededUpRobustFeaturePoint> keypoints = detector.ProcessImage(image);

    // Mark the detected points (AForge corner-marker filter)
    // and show the annotated image.
    var marker = new FeaturesMarker(keypoints);
    pictureBox1.Image = marker.Apply(image);
}
private void button1_Click(object sender, EventArgs e)
{
    // Load the sample image.
    Bitmap image = Surf.Properties.Resources.lena512;

    // Detect interest points with fixed SURF parameters
    // (threshold 0.0002, 5 octaves, initial step 2).
    var detector = new SpeededUpRobustFeaturesDetector(0.0002f, 5, 2);
    var keypoints = detector.ProcessImage(image);

    // Compute the SURF descriptor vector for every detected point.
    SurfDescriptor descriptor = detector.GetDescriptor();
    descriptor.Describe(keypoints);

    // Mark the points (AForge corner-marker filter) and display them.
    var marker = new FeaturesMarker(keypoints.ToArray());
    pictureBox1.Image = marker.Apply(image);
}
public void ProcessImageTest4()
{
    // Cross-checks our SURF detector against the reference OpenSURF
    // implementation by Chris Evans over several sample images: both
    // must produce identical feature points, in the same order.
    Bitmap[] bitmaps =
    {
        Resources.flower01, Resources.flower03, Resources.flower06,
        Resources.flower07, Resources.flower09, Resources.flower10,
    };

    var surf = new SpeededUpRobustFeaturesDetector();

    foreach (Bitmap img in bitmaps)
    {
        List<SpeededUpRobustFeaturePoint> expected;

        // Reference result: OpenSURF by Chris Evans.
        {
            // Build the integral image and extract + describe the
            // interest points with the same parameters our detector uses.
            OpenSURFcs.IntegralImage iimg = OpenSURFcs.IntegralImage.FromImage(img);
            var pts = OpenSURFcs.FastHessian.getIpoints(0.0002f, 5, 2, iimg);
            OpenSURFcs.SurfDescriptor.DecribeInterestPoints(pts, false, false, iimg);

            expected = new List<SpeededUpRobustFeaturePoint>();
            foreach (var p in pts)
                expected.Add(new SpeededUpRobustFeaturePoint(
                    p.x, p.y, p.scale, p.laplacian, p.orientation, p.response));
        }

        // Actual result: Accord.NET detector (based on OpenSURF).
        List<SpeededUpRobustFeaturePoint> actual = surf.ProcessImage(img);

        // Visual inspection helper, kept for debugging:
        // var img1 = new FeaturesMarker(actual).Apply(img);
        // var img2 = new FeaturesMarker(expected).Apply(img);
        // ImageBox.Show(new Concatenate(img1).Apply(img2), PictureBoxSizeMode.Zoom);

        // Assert the counts first: without this, a size mismatch would
        // surface as an IndexOutOfRangeException inside the loop below
        // instead of a clean assertion failure.
        Assert.AreEqual(expected.Count, actual.Count);

        for (int i = 0; i < expected.Count; i++)
            Assert.AreEqual(expected[i], actual[i]);
    }
}
/// <summary>
///   Detects SURF features in both images, matches them with a k-NN
///   matcher, fits a homography with RANSAC and draws the inlier
///   correspondences over the two images concatenated side by side.
/// </summary>
/// <param name="modelImage">The model (left) image.</param>
/// <param name="observedImage">The observed (right) image.</param>
/// <param name="setting">
///   SURF/matching settings. <c>HessianThresh</c> is expressed in
///   "per million" units (500 by default); <c>UniquenessThreshold</c>
///   must have a value (same requirement as the original code).
/// </param>
/// <returns>
///   The annotated concatenated bitmap, or <c>null</c> when fewer than
///   four correspondences are found (RANSAC needs at least four points
///   to estimate a homography).
/// </returns>
private Bitmap CompareAndDrawImage(Bitmap modelImage, Bitmap observedImage, SurfSettings setting)
{
    Stopwatch totalWatch = Stopwatch.StartNew();
    Stopwatch stepWatch = Stopwatch.StartNew();

    // Step 1: detect SURF feature points in both images.
    // The UI setting is scaled down to the detector's native range.
    double hessianThreshold = setting.HessianThresh ?? 500;
    float hessianThreshold2 = (float)hessianThreshold / 1000000;
    Debug.WriteLine("hessianThreshold2: {0}", hessianThreshold2);

    var surf = new SpeededUpRobustFeaturesDetector(hessianThreshold2);
    List<SpeededUpRobustFeaturePoint> surfPoints1 = surf.ProcessImage(modelImage);
    List<SpeededUpRobustFeaturePoint> surfPoints2 = surf.ProcessImage(observedImage);
    Debug.WriteLine("Surf points count: {0}", surfPoints1.Count);
    Debug.WriteLine("Surf points count: {0}", surfPoints2.Count);

    stepWatch.Stop();
    Debug.WriteLine("Surf Detection took {0} ms", stepWatch.ElapsedMilliseconds);

    // Mark the detected points on copies of the originals and join
    // them side by side; this is also the canvas for the match lines.
    stepWatch.Restart();
    Bitmap img1mark = new FeaturesMarker(surfPoints1, 2).Apply(modelImage);
    Bitmap img2mark = new FeaturesMarker(surfPoints2, 2).Apply(observedImage);
    Bitmap returnBitmap = new Concatenate(img1mark).Apply(img2mark);
    stepWatch.Stop();
    Debug.WriteLine("Surf point plotting took {0} ms", stepWatch.ElapsedMilliseconds);

    // Step 2: match feature points using a k-nearest-neighbor matcher.
    Debug.WriteLine("Threshold: {0}", setting.UniquenessThreshold.Value);
    stepWatch.Restart();
    var matcher = new KNearestNeighborMatching(2);
    matcher.Threshold = setting.UniquenessThreshold.Value;
    IntPoint[][] matches = matcher.Match(surfPoints1, surfPoints2);
    stepWatch.Stop();
    Debug.WriteLine("Knn Match took {0} ms", stepWatch.ElapsedMilliseconds);

    // The two correlated point sets (same index = same correspondence).
    IntPoint[] correlationPoints1 = matches[0];
    IntPoint[] correlationPoints2 = matches[1];
    Debug.WriteLine("Knn points count: {0}", correlationPoints1.Length);
    Debug.WriteLine("Knn points count: {0}", correlationPoints2.Length);

    // RANSAC needs at least four correspondences to fit a homography.
    if (correlationPoints1.Length < 4 || correlationPoints2.Length < 4)
    {
        MessageBox.Show("Insufficient points to attempt a fit.");
        return null;
    }

    // Step 3: estimate the homography with a robust (RANSAC) estimator.
    // Estimate() must run so that ransac.Inliers gets populated below.
    stepWatch.Restart();
    var ransac = new RansacHomographyEstimator(0.001, 0.99);
    MatrixH homography = ransac.Estimate(correlationPoints1, correlationPoints2);
    stepWatch.Stop();
    Debug.WriteLine("Ransac took {0} ms", stepWatch.ElapsedMilliseconds);

    // Keep only the inlier correspondences found by RANSAC.
    stepWatch.Restart();
    IntPoint[] inliers1 = correlationPoints1.Submatrix(ransac.Inliers);
    IntPoint[] inliers2 = correlationPoints2.Submatrix(ransac.Inliers);
    stepWatch.Stop();
    Debug.WriteLine("Ransac SubMatrix {0} ms", stepWatch.ElapsedMilliseconds);
    Debug.WriteLine("Ransac points count: {0}", inliers1.Length);
    Debug.WriteLine("Ransac points count: {0}", inliers2.Length);

    // Draw the inlier pairs across the concatenated image, shifting the
    // right-hand points by the model image's width so they land on the
    // second half of the canvas.
    stepWatch.Restart();
    PairsMarker inlierPairs = new PairsMarker(
        inliers1,
        inliers2.Apply(p => new IntPoint(p.X + modelImage.Width, p.Y)),
        Color.Red);
    returnBitmap = inlierPairs.Apply(returnBitmap);
    stepWatch.Stop();
    Debug.WriteLine("Ransac plotting took {0} ms", stepWatch.ElapsedMilliseconds);

    totalWatch.Stop();
    return returnBitmap;
}