Susan corners detector.

The class implements the SUSAN corners detector, described in: S.M. Smith, "SUSAN - a new approach to low level image processing", Internal Technical Report TR95SMS1, Defence Research Agency, Chobham Lane, Chertsey, Surrey, UK, 1995.

Some implementation notes: when analyzing each pixel and searching for its USAN area, a 7x7 mask comprising 37 pixels is used. The mask has a circular shape:

  xxx
 xxxxx
xxxxxxx
xxxxxxx
xxxxxxx
 xxxxx
  xxx

If the USAN's center of mass has the same coordinates as the nucleus (the central point), the pixel is not treated as a corner. For noise suppression, a 5x5 square window is used.

The class processes only grayscale 8 bpp and color 24/32 bpp images. In the case of a color image, it is converted to grayscale internally using the GrayscaleBT709 filter.

Sample usage:

// create corners detector's instance
SusanCornersDetector scd = new SusanCornersDetector( );
// process image searching for corners
List<IntPoint> corners = scd.ProcessImage( image );
// process points
foreach ( IntPoint corner in corners )
{
    // ...
}
Inheritance: ICornersDetector
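The detector's sensitivity is controlled by its DifferenceThreshold and GeometricalThreshold properties, which can also be passed to the constructor (as in the examples below). A minimal sketch of tuning them, where image is any image the detector accepts and the threshold values are purely illustrative:

// create detector and adjust its thresholds
SusanCornersDetector scd = new SusanCornersDetector( );
// brightness difference threshold used when collecting the USAN area
scd.DifferenceThreshold = 70;
// geometrical threshold: maximum USAN area for a pixel to be treated as a corner candidate
scd.GeometricalThreshold = 8;
// detect corners with the tuned settings
List<IntPoint> corners = scd.ProcessImage( image );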
Example #1
        public void RansacLineConstructorTest2()
        {
            Accord.Math.Tools.SetupGenerator(0);

            Bitmap image = Resources.noise_line;

            //Accord.Controls.ImageBox.Show(image); 

            var detector = new SusanCornersDetector();

            List<IntPoint> cloud = detector.ProcessImage(image);
            Assert.AreEqual(211, cloud.Count);

            Bitmap marks = new PointsMarker(cloud, Color.Pink).Apply(image);
            //Accord.Controls.ImageBox.Show(marks);

            RansacLine ransac = new RansacLine(5, 1e-10);
            Line line = ransac.Estimate(cloud);

            Assert.AreEqual(0.501134932f, line.Intercept);
            Assert.AreEqual(-0.865369201f, line.Slope);

            //var result = new LineMarker(line).Apply(image);
            //Accord.Controls.ImageBox.Show(result);
        }
        // 1. bright pixel / dark pixel
        // 2. lowest gray level
        // 3. highest gray level
        // 4. number of peaks in the x direction
        // 5. number of peaks in the y direction
        public static double[] ExtractFeatures(Bitmap bmp, int i)
        {
            //Apply GrayScale
            GrayscaleBT709 greyScaleFilter = new GrayscaleBT709();
            Bitmap newBmp = greyScaleFilter.Apply((Bitmap)bmp.Clone());

            //Count Blobs
            BlobCounter blobCounter = new BlobCounter();
            blobCounter.BackgroundThreshold = Color.FromArgb(255, 150, 150, 150);
            blobCounter.ProcessImage(newBmp);
            int blobs = (blobCounter.ObjectsCount - 1) * 30;

            //Count Corner
            SusanCornersDetector scd = new SusanCornersDetector();
            scd.DifferenceThreshold = 70;
            scd.GeometricalThreshold = 8;
            int corners = scd.ProcessImage((Bitmap)newBmp.Clone()).Count();

            //Apply Edge Filter
            CannyEdgeDetector filter = new CannyEdgeDetector();
            //newBmp = filter.Apply(newBmp);
            Histogram his = new HorizontalIntensityStatistics(newBmp).Gray;
            Histogram vis = new VerticalIntensityStatistics(newBmp).Gray;

            HoughLineTransformation lineTransform = new HoughLineTransformation();
            // apply Hough line transform
            lineTransform.ProcessImage(filter.Apply(newBmp));
            Bitmap houghLineImage = lineTransform.ToBitmap();
            // get lines using relative intensity
            HoughLine[] lines = lineTransform.GetLinesByRelativeIntensity(1);
            int linesCount = lines.Count() * 30;

            double[] features = new double[13] { blobs, corners, his.Max, his.Min, his.Mean, his.Median, his.StdDev,
                vis.Max, vis.Min, vis.Mean, vis.Median, vis.StdDev, linesCount };

            //double[] features = new double[3] { blobs, corners,lines};

            newBmp.Save(String.Format("test{0}.bmp",i));
            return features;
        }
Example #3
        public void RansacLineConstructorTest2()
        {
            Bitmap image = Properties.Resources.noise_line;

            ImageBox.Show(image); 

            var detector = new SusanCornersDetector();

            List<IntPoint> cloud = detector.ProcessImage(image);

            Bitmap marks = new PointsMarker(cloud, Color.Pink).Apply(image);
            ImageBox.Show(marks);

            RansacLine ransac = new RansacLine(5, 1e-10);
            Line line = ransac.Estimate(cloud);

            Bitmap result = new LineMarker(line).Apply(image);
            ImageBox.Show(result);

            Assert.Fail();
        }
Example #4
 public CornersMarkerProcessor()
 {
     m_detector = new SusanCornersDetector();
     m_filter = new CornersMarker(m_detector, Color.Red);
 }
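For context, a method along these lines could use the filter configured above to highlight detected corners; the Process method name and the incoming image are assumptions for illustration, not part of the original fragment:

 // run the detector through the configured CornersMarker filter,
 // which draws each detected corner in red on a new bitmap
 public Bitmap Process(Bitmap image)
 {
     return m_filter.Apply(image);
 }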
        private void initiateButton_Click(object sender, RoutedEventArgs e)
        {
            // create grayscale filter (BT709)
            Grayscale filter = new Grayscale(0.2125, 0.7154, 0.0721);
            // apply the filter
            img1 = filter.Apply(img1);
            img2 = filter.Apply(img2);

            Stopwatch st = new Stopwatch();
            st.Start();

            // collect reference points using corners detector (for example)
            SusanCornersDetector scd = new SusanCornersDetector(30, 18);
            List<IntPoint> points = scd.ProcessImage(img1);

            // create block matching algorithm's instance
            ExhaustiveBlockMatching bm = new ExhaustiveBlockMatching(12, 36);
            // process images searching for block matchings
            List<BlockMatch> matches = bm.ProcessImage(img1, points, img2);

            st.Stop();
            TimeSpan elapsed = st.Elapsed;
            timedisp.Text = "Elapsed time = " + elapsed.ToString();

            // draw displacement vectors
            BitmapData data = img1.LockBits(
                new System.Drawing.Rectangle(0, 0, img1.Width, img1.Height),
                ImageLockMode.ReadWrite, img1.PixelFormat);

            foreach (BlockMatch match in matches)
            {
                // highlight the original point in source image
                AForge.Imaging.Drawing.FillRectangle(data,
                    new System.Drawing.Rectangle(match.SourcePoint.X - 1, match.SourcePoint.Y - 1, 3, 3),
                    System.Drawing.Color.Yellow);
                // draw line to the point in search image
                AForge.Imaging.Drawing.Line(data, match.SourcePoint, match.MatchPoint, System.Drawing.Color.Red);

                // check similarity
                if (match.Similarity > 0.98f)
                {
                    // process block with high similarity
                }
            }

            img1.UnlockBits(data);
            bi = Compatibility.Compatibility.BitmapToBitmapImage(img1);
            image1.Source = bi;
        }