Integral image.

The class implements the integral image concept described by Viola and Jones in: P. Viola and M. J. Jones, "Robust real-time face detection", Int. Journal of Computer Vision 57(2), pp. 137–154, 2004.

"An integral image I of an input image G is defined as the image in which the intensity at a pixel position is equal to the sum of the intensities of all the pixels above and to the left of that position in the original image."

The intensity at position (x, y) can be written as:

I(x, y) = SUM[i = 0..x] SUM[j = 0..y] G(i, j)
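Once the integral image is built, this definition gives the sum over any axis-aligned rectangle with just four lookups (inclusion-exclusion over the rectangle's corners). A minimal sketch of the idea, assuming a hypothetical 2D array with one extra zero row and column, as the implementations below use:

    // Hypothetical helper: sum of G over the rectangle (x1, y1)-(x2, y2), inclusive.
    // "ii" is the integral image, with ii[0, *] = ii[*, 0] = 0.
    static uint RectangleSum(uint[,] ii, int x1, int y1, int x2, int y2)
    {
        // four lookups instead of iterating over every pixel in the rectangle
        return ii[y2 + 1, x2 + 1] - ii[y1, x2 + 1] - ii[y2 + 1, x1] + ii[y1, x1];
    }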

The class uses 32-bit unsigned integers to represent the integral image. This is sufficient as long as the total intensity fits in 32 bits: the worst case is 255 × width × height, so images up to roughly 16.8 million pixels are safe even if every pixel is 255.

The class processes only grayscale (8 bpp indexed) images.

This class contains two versions of most methods: safe and unsafe. Safe methods validate the provided coordinates and ensure they belong to the image, which makes them slower. Unsafe methods skip these checks and assume the coordinates are valid, which makes them faster.
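For example, assuming the AForge.NET-style rectangle-sum API used throughout these samples (GetRectangleSum and its unchecked counterpart GetRectangleSumUnsafe):

    // safe: out-of-image coordinates are handled, at the cost of extra checks
    uint s1 = im.GetRectangleSum(-5, -5, 100, 100);

    // unsafe: no checks, valid only because the rectangle is known to lie inside the image
    uint s2 = im.GetRectangleSumUnsafe(0, 0, 9, 9);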

Sample usage:

// create integral image
IntegralImage im = IntegralImage.FromBitmap( image );
// get pixels' mean value in the specified rectangle
float mean = im.GetRectangleMean( 10, 10, 20, 30 );
Inheritance: ICloneable
 /// <summary>
 ///   Computes the filter using the specified <see cref="IntegralImage">
 ///   integral image</see>.
 /// </summary>
 ///
 /// <param name="integral">The integral image.</param>
 ///
 public void Compute(IntegralImage integral)
 {
     for (int i = 0; i < responses.Length; ++i)
     {
         responses[i].Compute(integral);
     }
 }
        /// <summary>
        ///   Creates a new object that is a copy of the current instance.
        /// </summary>
        ///
        /// <returns>
        ///   A new object that is a copy of this instance.
        /// </returns>
        ///
        public object Clone()
        {
            var clone = new IntegralImage(width, height);

            integralImage.CopyTo(clone.integralImage, 0);
            return clone;
        }
        /// <summary>
        ///   Releases unmanaged and - optionally - managed resources.
        /// </summary>
        ///
        /// <param name="disposing"><c>true</c> to release both managed and unmanaged
        ///   resources; <c>false</c> to release only unmanaged resources.</param>
        ///
        protected virtual void Dispose(bool disposing)
        {
            if (disposing)
            {
                // free managed resources
            }

            this.responses  = null;
            this.integral   = null;
            this.descriptor = null;
        }
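
This follows the standard .NET dispose pattern; a class using it would typically also expose a public Dispose method along these lines (a sketch, not part of the original snippet):

    public void Dispose()
    {
        // release resources and skip finalization, since cleanup is already done
        Dispose(true);
        GC.SuppressFinalize(this);
    }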
        /// <summary>
        ///   Initializes a new instance of the <see cref="FastRetinaKeypointDescriptor"/> class.
        /// </summary>
        ///
        internal FastRetinaKeypointDescriptor(UnmanagedImage image,
                                              IntegralImage integral, FastRetinaKeypointPattern pattern)
        {
            this.Extended            = false;
            this.IsOrientationNormal = true;
            this.IsScaleNormal       = true;
            this.Image    = image;
            this.Integral = integral;

            this.pattern = pattern;
        }
Example #5
        /// <summary>
        ///   Process image looking for interest points.
        /// </summary>
        ///
        /// <param name="image">Source image data to process.</param>
        ///
        /// <returns>Returns list of found interest points.</returns>
        ///
        public List<FastRetinaKeypoint> ProcessImage(UnmanagedImage image)
        {
            // check image format
            if (
                (image.PixelFormat != PixelFormat.Format8bppIndexed) &&
                (image.PixelFormat != PixelFormat.Format24bppRgb) &&
                (image.PixelFormat != PixelFormat.Format32bppRgb) &&
                (image.PixelFormat != PixelFormat.Format32bppArgb)
                )
            {
                throw new UnsupportedImageFormatException("Unsupported pixel format of the source image.");
            }

            // make sure we have grayscale image
            if (image.PixelFormat == PixelFormat.Format8bppIndexed)
            {
                grayImage = image;
            }
            else
            {
                // create temporary grayscale image
                grayImage = Grayscale.CommonAlgorithms.BT709.Apply(image);
            }


            // 1. Extract corners points from the image.
            List<IntPoint> corners = Detector.ProcessImage(grayImage);

            var features = new List<FastRetinaKeypoint>();

            for (int i = 0; i < corners.Count; i++)
            {
                features.Add(new FastRetinaKeypoint(corners[i].X, corners[i].Y));
            }


            // 2. Compute the integral for the given image
            integral = IntegralImage.FromBitmap(grayImage);


            // 3. Compute feature descriptors if required
            descriptor = null;
            if (featureType != FastRetinaKeypointDescriptorType.None)
            {
                descriptor = GetDescriptor();
                descriptor.Compute(features);
            }

            return features;
        }
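
A hypothetical usage sketch for this method; the hosting detector type (here FastRetinaKeypointDetector, following Accord.NET naming) and its default constructor are assumptions, since the surrounding class is not shown:

    // detect FREAK keypoints in an unmanaged image
    var freak = new FastRetinaKeypointDetector();
    List<FastRetinaKeypoint> points = freak.ProcessImage(image);

    foreach (FastRetinaKeypoint p in points)
        Console.WriteLine("({0}, {1})", p.X, p.Y);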
        public IntegralImageTest()
        {
            UnmanagedImage uImage = UnmanagedImage.Create(10, 10, PixelFormat.Format8bppIndexed);

            for (int y = 0; y < 10; y++)
            {
                for (int x = 0; x < 10; x++)
                {
                    uImage.SetPixel(x, y, ((x + y) % 2 == 0) ? Color.FromArgb(0, 0, 0) : Color.FromArgb(1, 1, 1));
                }
            }

            integralImage = IntegralImage.FromBitmap(uImage);
        }
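
Since the fixture above alternates intensities 0 and 1 in a checkerboard, half the pixels of the full image are 1, so a test built on it could check (a hypothetical assertion, assuming the GetRectangleMean API shown in the sample usage and that the 0/1 pattern survives the SetPixel color conversion):

    // fifty 1-valued pixels out of one hundred: the mean should come out to 0.5
    float mean = integralImage.GetRectangleMean(0, 0, 9, 9);
    // expected: mean == 0.5f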
        /// <summary>
        /// Construct integral image from source grayscale image.
        /// </summary>
        ///
        /// <param name="image">Source unmanaged image.</param>
        ///
        /// <returns>Returns integral image.</returns>
        ///
        /// <exception cref="UnsupportedImageFormatException">The source image has incorrect pixel format.</exception>
        ///
        public static IntegralImage FromBitmap(UnmanagedImage image)
        {
            // check image format
            if (image.PixelFormat != PixelFormat.Format8bppIndexed)
            {
                throw new UnsupportedImageFormatException("Source image can be grayscale (8 bpp indexed) image only.");
            }

            // get source image size
            int width  = image.Width;
            int height = image.Height;
            int offset = image.Stride - width;

            // create integral image; it carries one extra row and column of zeros,
            // so the recurrence below needs no special cases at the borders
            var im = new IntegralImage(width, height);

            uint[][] matrix = im.matrix;

            // do the job
            unsafe
            {
                byte *src = (byte *)image.ImageData.ToPointer();

                // for each line
                for (int y = 1; y <= height; y++)
                {
                    uint rowSum = 0;

                    // for each pixel
                    for (int x = 1; x <= width; x++, src++)
                    {
                        image.CheckBounds(src);

                        rowSum += *src;

                        matrix[y][x] = rowSum + matrix[y - 1][x];
                    }
                    src += offset;
                }
            }

            return im;
        }
        /// <summary>
        /// Construct integral image from source grayscale image.
        /// </summary>
        ///
        /// <param name="image">Source grayscale image.</param>
        ///
        /// <returns>Returns integral image.</returns>
        ///
        /// <exception cref="UnsupportedImageFormatException">The source image has incorrect pixel format.</exception>
        ///
        public static IntegralImage FromBitmap(Bitmap image)
        {
            // check image format
            if (image.PixelFormat != PixelFormat.Format8bppIndexed)
            {
                throw new UnsupportedImageFormatException("Source image can be grayscale (8 bpp indexed) image only.");
            }

            // lock source image
            BitmapData imageData = image.LockBits(ImageLockMode.ReadOnly);

            try
            {
                // process the image
                return FromBitmap(imageData);
            }
            finally
            {
                // unlock image even if processing throws
                image.UnlockBits(imageData);
            }
        }
            /// <summary>
            ///   Computes the filter for the specified integral image.
            /// </summary>
            ///
            /// <param name="image">The integral image.</param>
            ///
            public void Compute(IntegralImage image)
            {
                int   b = (Size - 1) / 2 + 1;
                int   c = Size / 3;
                int   w = Size;
                float inv = 1f / (w * w);
                float Dxx, Dyy, Dxy;

                for (int y = 0; y < Height; y++)
                {
                    for (int x = 0; x < Width; x++)
                    {
                        // Get the image coordinates
                        int i = y * Step;
                        int j = x * Step;

                        // Compute response components
                        Dxx = sum(image, i - c + 1, j - b, 2 * c - 1, w)
                              - sum(image, i - c + 1, j - c / 2, 2 * c - 1, c) * 3;

                        Dyy = sum(image, i - b, j - c + 1, w, 2 * c - 1)
                              - sum(image, i - c / 2, j - c + 1, c, 2 * c - 1) * 3;

                        Dxy = sum(image, i - c, j + 1, c, c)
                              + sum(image, i + 1, j - c, c, c)
                              - sum(image, i - c, j - c, c, c)
                              - sum(image, i + 1, j + 1, c, c);

                        // Normalize the filter responses with respect to their size
                        Dxx *= inv;
                        Dyy *= inv;
                        Dxy *= inv;

                        // Get the determinant of Hessian response & Laplacian sign;
                        // 0.9 is the relative weight of Dxy used in the SURF paper
                        Responses[y, x] = (Dxx * Dyy) - (0.9f * 0.9f * Dxy * Dxy);
                        Laplacian[y, x] = (Dxx + Dyy) >= 0 ? 1 : 0;
                    }
                }
            }
Example #10
        /// <summary>
        ///   Computes the filter for the specified integral image.
        /// </summary>
        ///
        /// <param name="image">The integral image.</param>
        ///
        public void Compute(IntegralImage image)
        {
            int   b = (Size - 1) / 2 + 1;
            int   c = Size / 3;
            int   w = Size;
            float inv = 1f / (w * w);
            float Dxx, Dyy, Dxy;

            for (int y = 0; y < Height; y++)
            {
                for (int x = 0; x < Width; x++)
                {
                    // Get the image coordinates
                    int i = y * Step;
                    int j = x * Step;

                    // Compute response components
                    Dxx = ((int)image.GetRectangleSum(j - b, i - c + 1, j - b + w - 1, i - c + 2 * c - 1)
                           - (int)image.GetRectangleSum(j - c / 2, i - c + 1, j - c / 2 + c - 1, i - c + 2 * c - 1) * 3);

                    Dyy = ((int)image.GetRectangleSum(j - c + 1, i - b, j - c + 2 * c - 1, i - b + w - 1)
                           - (int)image.GetRectangleSum(j - c + 1, i - c / 2, j - c + 2 * c - 1, i - c / 2 + c - 1) * 3);

                    Dxy = ((int)image.GetRectangleSum(j + 1, i - c, j + c, i - 1)
                           + (int)image.GetRectangleSum(j - c, i + 1, j - 1, i + c)
                           - (int)image.GetRectangleSum(j - c, i - c, j - 1, i - 1)
                           - (int)image.GetRectangleSum(j + 1, i + 1, j + c, i + c));

                    // Normalize the filter responses with respect to their size
                    Dxx *= inv / 255f;
                    Dyy *= inv / 255f;
                    Dxy *= inv / 255f;

                    // Get the determinant of Hessian response & Laplacian sign
                    Responses[y, x] = (Dxx * Dyy) - (0.9f * 0.9f * Dxy * Dxy);
                    Laplacian[y, x] = (Dxx + Dyy) >= 0 ? 1 : 0;
                }
            }
        }
Example #11
        /// <summary>
        ///   This method should be implemented by inheriting classes to implement the
        ///   actual feature extraction, transforming the input image into a list of features.
        /// </summary>
        ///
        protected override IEnumerable<FastRetinaKeypoint> InnerTransform(UnmanagedImage image)
        {
            // make sure we have grayscale image
            if (image.PixelFormat == PixelFormat.Format8bppIndexed)
            {
                grayImage = image;
            }
            else
            {
                // create temporary grayscale image
                grayImage = Grayscale.CommonAlgorithms.BT709.Apply(image);
            }

            // 1. Extract corners points from the image.
            List<IntPoint> corners = Detector.ProcessImage(grayImage);

            var features = new List<FastRetinaKeypoint>();

            for (int i = 0; i < corners.Count; i++)
            {
                features.Add(new FastRetinaKeypoint(corners[i].X, corners[i].Y));
            }

            // 2. Compute the integral for the given image
            integral = IntegralImage.FromBitmap(grayImage);

            // 3. Compute feature descriptors if required
            descriptor = null;
            if (featureType != FastRetinaKeypointDescriptorType.None)
            {
                descriptor = GetDescriptor();
                descriptor.Compute(features);
            }

            return features;
        }
 /// <summary>
 ///   Initializes a new instance of the <see cref="SurfDescriptor"/> class.
 /// </summary>
 /// 
 /// <param name="integralImage">
 ///   The integral image which is the source of the feature points.
 /// </param>
 /// 
 public SurfDescriptor(IntegralImage integralImage)
 {
     this.integral = integralImage;
 }
 private static float sum(IntegralImage img, int row, int col, int rows, int cols)
 {
     // rectangle sum rescaled from the [0, 255] intensity range to [0, 1]
     return img.GetRectangleSum(col, row, col + cols - 1, row + rows - 1) / 255f;
 }
 /// <summary>
 ///   Initializes a new instance of the <see cref="SpeededUpRobustFeaturesDescriptor"/> class.
 /// </summary>
 /// 
 /// <param name="integralImage">
 ///   The integral image which is the source of the feature points.
 /// </param>
 /// 
 public SpeededUpRobustFeaturesDescriptor(IntegralImage integralImage)
 {
     this.integral = integralImage;
 }
        /// <summary>
        ///   Process image looking for interest points.
        /// </summary>
        ///
        /// <param name="image">Source image data to process.</param>
        ///
        /// <returns>Returns list of found interest points.</returns>
        ///
        /// <exception cref="UnsupportedImageFormatException">
        ///   The source image has incorrect pixel format.
        /// </exception>
        ///
        public List<SurfPoint> ProcessImage(UnmanagedImage image)
        {
            // check image format
            if (
                (image.PixelFormat != PixelFormat.Format8bppIndexed) &&
                (image.PixelFormat != PixelFormat.Format24bppRgb) &&
                (image.PixelFormat != PixelFormat.Format32bppRgb) &&
                (image.PixelFormat != PixelFormat.Format32bppArgb)
                )
            {
                throw new UnsupportedImageFormatException("Unsupported pixel format of the source image.");
            }

            // make sure we have grayscale image
            UnmanagedImage grayImage = null;

            if (image.PixelFormat == PixelFormat.Format8bppIndexed)
            {
                grayImage = image;
            }
            else
            {
                // create temporary grayscale image
                grayImage = Grayscale.CommonAlgorithms.BT709.Apply(image);
            }


            // 1. Compute the integral for the given image
            integral = IntegralImage.FromBitmap(grayImage);


            // 2. Compute interest point response map
            if (responses == null ||
                image.Width != responses.Width || image.Height != responses.Height)
            {
                responses = new ResponseFilters(image.Width, image.Height, octaves, initial);
            }

            responses.Compute(integral);


            // 3. Suppress non-maximum points
            List<SurfPoint> featureList = new List<SurfPoint>();

            // for each image pyramid in the response map
            foreach (ResponseLayer[] layers in responses)
            {
                // Grab the three layers forming the pyramid
                ResponseLayer bot = layers[0]; // bottom layer
                ResponseLayer mid = layers[1]; // middle layer
                ResponseLayer top = layers[2]; // top layer

                int border = (top.Size + 1) / (2 * top.Step);

                int tstep = top.Step;
                int mstep = mid.Size - bot.Size;

                int mscale = mid.Width / top.Width;
                int bscale = bot.Width / top.Width;

                int r = 1;

                // for each row
                for (int y = border + 1; y < top.Height - border; y++)
                {
                    // for each pixel
                    for (int x = border + 1; x < top.Width - border; x++)
                    {
                        float currentValue = mid.Responses[y * mscale, x * mscale];

                        // for each windows' row
                        for (int i = -r; (currentValue >= threshold) && (i <= r); i++)
                        {
                            // for each windows' pixel
                            for (int j = -r; j <= r; j++)
                            {
                                int yi = y + i;
                                int xj = x + j;

                                // for each response layer
                                if (top.Responses[yi, xj] >= currentValue ||
                                    bot.Responses[yi * bscale, xj * bscale] >= currentValue ||
                                    ((i != 0 || j != 0) && mid.Responses[yi * mscale, xj * mscale] >= currentValue))
                                {
                                    currentValue = 0;
                                    break;
                                }
                            }
                        }

                        // check if this point is really interesting
                        if (currentValue >= threshold)
                        {
                            // interpolate to sub-pixel precision
                            double[] offset = interpolate(y, x, top, mid, bot);

                            if (System.Math.Abs(offset[0]) < 0.5 &&
                                System.Math.Abs(offset[1]) < 0.5 &&
                                System.Math.Abs(offset[2]) < 0.5)
                            {
                                featureList.Add(new SurfPoint(
                                                    (float)((x + offset[0]) * tstep),
                                                    (float)((y + offset[1]) * tstep),
                                                    (float)(0.1333f * (mid.Size + offset[2] * mstep)),
                                                    mid.Laplacian[y * mscale, x * mscale]));
                            }
                        }
                    }
                }
            }

            return featureList;
        }
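
A hypothetical usage sketch; the class hosting this ProcessImage method is not shown, so the detector type name (SurfDetector) and the SurfPoint members used below are assumptions:

    // run SURF detection on an unmanaged image
    var detector = new SurfDetector();
    List<SurfPoint> points = detector.ProcessImage(image);

    Console.WriteLine("found {0} interest points", points.Count);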
Example #16
        /// <summary>
        /// Construct integral image from source grayscale image.
        /// </summary>
        /// 
        /// <param name="image">Source unmanaged image.</param>
        /// 
        /// <returns>Returns integral image.</returns>
        /// 
        /// <exception cref="UnsupportedImageFormatException">The source image has incorrect pixel format.</exception>
        /// 
        public static IntegralImage FromBitmap(UnmanagedImage image)
        {
            // check image format
            if (image.PixelFormat != PixelFormat.Format8bppIndexed)
            {
                throw new UnsupportedImageFormatException("Source image can be grayscale (8 bpp indexed) image only.");
            }

            // get source image size
            int width = image.Width;
            int height = image.Height;
            int offset = image.Stride - width;

            // create integral image
            var im = new IntegralImage(width, height);
            uint[,] integralImage = im.integralImage;

            // do the job
            unsafe
            {
                byte* src = (byte*)image.ImageData.ToPointer();

                // for each line
                for (int y = 1; y <= height; y++)
                {
                    uint rowSum = 0;

                    // for each pixel
                    for (int x = 1; x <= width; x++, src++)
                    {
                        rowSum += *src;

                        integralImage[y, x] = rowSum + integralImage[y - 1, x];
                    }
                    src += offset;
                }
            }

            return im;
        }
Example #18
        private List<SpeededUpRobustFeaturePoint> processImage(UnmanagedImage image)
        {
            // make sure we have grayscale image
            UnmanagedImage grayImage = null;

            if (image.PixelFormat == PixelFormat.Format8bppIndexed)
            {
                grayImage = image;
            }
            else
            {
                // create temporary grayscale image
                grayImage = Grayscale.CommonAlgorithms.BT709.Apply(image);
            }


            // 1. Compute the integral for the given image
            integral = IntegralImage.FromBitmap(grayImage);



            // 2. Create and compute interest point response map
            if (responses == null)
            {
                // re-create only if really needed
                responses = new ResponseLayerCollection(image.Width, image.Height, octaves, initial);
            }
            else
            {
                responses.Update(image.Width, image.Height, initial);
            }

            // Compute the response map
            responses.Compute(integral);


            // 3. Suppress non-maximum points
            List<SpeededUpRobustFeaturePoint> featureList =
                new List<SpeededUpRobustFeaturePoint>();

            // for each image pyramid in the response map
            foreach (ResponseLayer[] layers in responses)
            {
                // Grab the three layers forming the pyramid
                ResponseLayer bot = layers[0]; // bottom layer
                ResponseLayer mid = layers[1]; // middle layer
                ResponseLayer top = layers[2]; // top layer

                int border = (top.Size + 1) / (2 * top.Step);

                int tstep = top.Step;
                int mstep = mid.Size - bot.Size;


                int r = 1;

                // for each row
                for (int y = border + 1; y < top.Height - border; y++)
                {
                    // for each pixel
                    for (int x = border + 1; x < top.Width - border; x++)
                    {
                        int mscale = mid.Width / top.Width;
                        int bscale = bot.Width / top.Width;

                        double currentValue = mid.Responses[y * mscale, x * mscale];

                        // for each windows' row
                        for (int i = -r; (currentValue >= threshold) && (i <= r); i++)
                        {
                            // for each windows' pixel
                            for (int j = -r; j <= r; j++)
                            {
                                int yi = y + i;
                                int xj = x + j;

                                // for each response layer
                                if (top.Responses[yi, xj] >= currentValue ||
                                    bot.Responses[yi * bscale, xj * bscale] >= currentValue ||
                                    ((i != 0 || j != 0) && mid.Responses[yi * mscale, xj * mscale] >= currentValue))
                                {
                                    currentValue = 0;
                                    break;
                                }
                            }
                        }

                        // check if this point is really interesting
                        if (currentValue >= threshold)
                        {
                            // interpolate to sub-pixel precision
                            double[] offset = interpolate(y, x, top, mid, bot);

                            if (System.Math.Abs(offset[0]) < 0.5 &&
                                System.Math.Abs(offset[1]) < 0.5 &&
                                System.Math.Abs(offset[2]) < 0.5)
                            {
                                featureList.Add(new SpeededUpRobustFeaturePoint(
                                                    (x + offset[0]) * tstep,
                                                    (y + offset[1]) * tstep,
                                                    0.133333333 * (mid.Size + offset[2] * mstep),
                                                    mid.Laplacian[y * mscale, x * mscale]));
                            }
                        }
                    }
                }
            }

            descriptor = null;

            if (featureType != SpeededUpRobustFeatureDescriptorType.None)
            {
                descriptor           = new SpeededUpRobustFeaturesDescriptor(integral);
                descriptor.Extended  = featureType == SpeededUpRobustFeatureDescriptorType.Extended;
                descriptor.Invariant = computeOrientation;
                descriptor.Compute(featureList);
            }
            else if (computeOrientation)
            {
                descriptor = new SpeededUpRobustFeaturesDescriptor(integral);
                foreach (var p in featureList)
                {
                    p.Orientation = descriptor.GetOrientation(p);
                }
            }

            return featureList;
        }