/// <summary>
///   Computes the filter for every response layer using the
///   specified <see cref="IntegralImage">integral image</see>.
/// </summary>
///
/// <param name="integral">The integral image.</param>
///
public void Compute(IntegralImage integral)
{
    // Delegate the actual filtering work to each layer in turn.
    foreach (var response in responses)
        response.Compute(integral);
}
/// <summary>
///   Initializes a new instance of the <see cref="FastRetinaKeypointDescriptor"/> class.
/// </summary>
///
internal FastRetinaKeypointDescriptor(UnmanagedImage image,
    IntegralImage integral, FastRetinaKeypointPattern pattern)
{
    // Keep references to the source data used by the descriptor.
    this.Image = image;
    this.Integral = integral;
    this.pattern = pattern;

    // Default configuration: standard (non-extended) descriptors,
    // normalized for both orientation and scale.
    this.Extended = false;
    this.IsOrientationNormal = true;
    this.IsScaleNormal = true;
}
/// <summary>
///   Process image looking for interest points.
/// </summary>
///
/// <param name="image">Source image data to process.</param>
///
/// <returns>Returns list of found interest points.</returns>
///
public List<FastRetinaKeypoint> ProcessImage(UnmanagedImage image)
{
    // Only 8 bpp grayscale and 24/32 bpp color formats are supported.
    PixelFormat format = image.PixelFormat;
    bool supported =
        format == PixelFormat.Format8bppIndexed ||
        format == PixelFormat.Format24bppRgb ||
        format == PixelFormat.Format32bppRgb ||
        format == PixelFormat.Format32bppArgb;

    if (!supported)
        throw new UnsupportedImageFormatException("Unsupported pixel format of the source image.");

    // Work on a grayscale version of the input, converting when necessary.
    grayImage = (format == PixelFormat.Format8bppIndexed)
        ? image
        : Grayscale.CommonAlgorithms.BT709.Apply(image);

    // 1. Extract corner points from the image.
    List<IntPoint> corners = Detector.ProcessImage(grayImage);

    List<FastRetinaKeypoint> features = new List<FastRetinaKeypoint>();
    foreach (IntPoint corner in corners)
        features.Add(new FastRetinaKeypoint(corner.X, corner.Y));

    // 2. Compute the integral for the given image.
    integral = IntegralImage.FromBitmap(grayImage);

    // 3. Compute feature descriptors if required.
    descriptor = null;
    if (featureType != FastRetinaKeypointDescriptorType.None)
    {
        descriptor = GetDescriptor();
        descriptor.Compute(features);
    }

    return features;
}
/// <summary>
///   Construct integral image from source grayscale image.
/// </summary>
///
/// <param name="image">Source unmanaged image.</param>
///
/// <returns>Returns integral image.</returns>
///
/// <exception cref="UnsupportedImageFormatException">The source image has incorrect pixel format.</exception>
///
public static IntegralImage FromBitmap(UnmanagedImage image)
{
    // check image format: only 8 bpp grayscale is accepted.
    // Throw the documented UnsupportedImageFormatException (which derives
    // from ArgumentException) instead of a bare ArgumentException, so the
    // behavior matches both the XML contract and the Bitmap overload.
    if (image.PixelFormat != PixelFormat.Format8bppIndexed)
    {
        throw new UnsupportedImageFormatException("Source image can be grayscale (8 bpp indexed) image only.");
    }

    // get source image size
    int width = image.Width;
    int height = image.Height;
    int offset = image.Stride - width; // padding bytes at the end of each row

    // create integral image (internally sized (height+1) x (width+1),
    // with a zero first row/column so sums below need no bounds checks)
    IntegralImage im = new IntegralImage(width, height);
    uint[,] integralImage = im.integralImage;

    // do the job
    unsafe
    {
        byte* src = (byte*)image.ImageData.ToPointer();

        // for each line
        for (int y = 1; y <= height; y++)
        {
            uint rowSum = 0;

            // for each pixel: running row sum plus the cell above
            // yields the standard summed-area table recurrence
            for (int x = 1; x <= width; x++, src++)
            {
                rowSum += *src;
                integralImage[y, x] = rowSum + integralImage[y - 1, x];
            }
            src += offset;
        }
    }

    return im;
}
/// <summary>
///   Construct integral image from source grayscale image.
/// </summary>
///
/// <param name="image">Source grayscale image.</param>
///
/// <returns>Returns integral image.</returns>
///
/// <exception cref="UnsupportedImageFormatException">The source image has incorrect pixel format.</exception>
///
public static IntegralImage FromBitmap(Bitmap image)
{
    // check image format: only 8 bpp grayscale is accepted
    if (image.PixelFormat != PixelFormat.Format8bppIndexed)
    {
        throw new UnsupportedImageFormatException("Source image can be grayscale (8 bpp indexed) image only.");
    }

    // lock source image
    BitmapData imageData = image.LockBits(
        new Rectangle(0, 0, image.Width, image.Height),
        ImageLockMode.ReadOnly, PixelFormat.Format8bppIndexed);

    try
    {
        // process the image
        return FromBitmap(imageData);
    }
    finally
    {
        // unlock image even if processing throws, so the bitmap
        // is never left locked
        image.UnlockBits(imageData);
    }
}
/// <summary>
///   Computes the filter for the specified integral image.
/// </summary>
///
/// <param name="image">The integral image.</param>
///
public void Compute(IntegralImage image)
{
    // Filter geometry derived from this layer's Size.
    // NOTE(review): these look like the SURF box-filter lobe
    // border/width parameters — confirm against the detector's
    // layer-size definitions.
    int b = (Size - 1) / 2 + 1;
    int c = Size / 3;
    int w = Size;
    float inv = 1f / (w * w); // normalization by filter area

    float Dxx, Dyy, Dxy;

    for (int y = 0; y < Height; y++)
    {
        for (int x = 0; x < Width; x++)
        {
            // Get the image coordinates
            int i = y * Step;
            int j = x * Step;

            // Compute response components via rectangle sums over the
            // integral image (box-filter second-derivative approximations)
            Dxx = ((int)image.GetRectangleSum(j - b, i - c + 1, j - b + w - 1, i - c + 2 * c - 1)
                - (int)image.GetRectangleSum(j - c / 2, i - c + 1, j - c / 2 + c - 1, i - c + 2 * c - 1) * 3);
            Dyy = ((int)image.GetRectangleSum(j - c + 1, i - b, j - c + 2 * c - 1, i - b + w - 1)
                - (int)image.GetRectangleSum(j - c + 1, i - c / 2, j - c + 2 * c - 1, i - c / 2 + c - 1) * 3);
            Dxy = ((int)image.GetRectangleSum(j + 1, i - c, j + c, i - 1)
                + (int)image.GetRectangleSum(j - c, i + 1, j - 1, i + c)
                - (int)image.GetRectangleSum(j - c, i - c, j - 1, i - 1)
                - (int)image.GetRectangleSum(j + 1, i + 1, j + c, i + c));

            // Normalize the filter responses with respect to their size
            // (and by 255, presumably to map byte pixel sums to [0,1] —
            // TODO confirm)
            Dxx *= inv / 255f;
            Dyy *= inv / 255f;
            Dxy *= inv / 255f;

            // Get the determinant of Hessian response & laplacian sign;
            // 0.9^2 weights the Dxy^2 term relative to Dxx*Dyy
            Responses[y, x] = (Dxx * Dyy) - (0.9f * 0.9f * Dxy * Dxy);
            Laplacian[y, x] = (Dxx + Dyy) >= 0 ? 1 : 0;
        }
    }
}
/// <summary>
///   Initializes a new instance of the
///   <see cref="SpeededUpRobustFeaturesDescriptor"/> class.
/// </summary>
///
/// <param name="integralImage">
///   The integral image which is the source of the feature points.
/// </param>
///
public SpeededUpRobustFeaturesDescriptor(IntegralImage integralImage)
{
    // Keep a reference to the integral image; descriptor
    // computations read rectangle sums from it directly.
    this.integral = integralImage;
}
/// <summary>
///   Detects SURF interest points in the given image: builds the
///   integral image, computes the response-layer pyramid, performs
///   3x3x3 non-maximum suppression across scales, and (optionally)
///   computes descriptors and/or orientations for the surviving points.
/// </summary>
///
/// <param name="image">Source image data to process.</param>
///
/// <returns>Returns list of found interest points.</returns>
///
private List<SpeededUpRobustFeaturePoint> processImage(UnmanagedImage image)
{
    // make sure we have grayscale image
    UnmanagedImage grayImage = null;

    if (image.PixelFormat == PixelFormat.Format8bppIndexed)
    {
        grayImage = image;
    }
    else
    {
        // create temporary grayscale image
        grayImage = Grayscale.CommonAlgorithms.BT709.Apply(image);
    }

    // 1. Compute the integral for the given image
    integral = IntegralImage.FromBitmap(grayImage);

    // 2. Create and compute interest point response map
    if (responses == null)
    {
        // re-create only if really needed
        responses = new ResponseLayerCollection(image.Width, image.Height, octaves, initial);
    }
    else
    {
        responses.Update(image.Width, image.Height, initial);
    }

    // Compute the response map
    responses.Compute(integral);

    // 3. Suppress non-maximum points
    List<SpeededUpRobustFeaturePoint> featureList =
        new List<SpeededUpRobustFeaturePoint>();

    // for each image pyramid in the response map
    foreach (ResponseLayer[] layers in responses)
    {
        // Grab the three layers forming the pyramid
        ResponseLayer bot = layers[0]; // bottom layer
        ResponseLayer mid = layers[1]; // middle layer
        ResponseLayer top = layers[2]; // top layer

        // border: outer rows/columns skipped so the 3x3 window and the
        // filter support stay inside the layer
        int border = (top.Size + 1) / (2 * top.Step);

        int tstep = top.Step;          // sampling step of the top layer
        int mstep = mid.Size - bot.Size; // filter-size gap between layers

        int r = 1; // radius of the suppression window (3x3)

        // for each row
        for (int y = border + 1; y < top.Height - border; y++)
        {
            // for each pixel
            for (int x = border + 1; x < top.Width - border; x++)
            {
                // scale factors mapping top-layer coordinates into the
                // (denser) middle and bottom layers
                int mscale = mid.Width / top.Width;
                int bscale = bot.Width / top.Width;

                double currentValue = mid.Responses[y * mscale, x * mscale];

                // for each windows' row
                // (loop short-circuits as soon as the candidate drops
                // below threshold, i.e. once it is suppressed)
                for (int i = -r; (currentValue >= threshold) && (i <= r); i++)
                {
                    // for each windows' pixel
                    for (int j = -r; j <= r; j++)
                    {
                        int yi = y + i;
                        int xj = x + j;

                        // for each response layer: the candidate must be a
                        // strict maximum over all 26 neighbors in scale-space
                        if (top.Responses[yi, xj] >= currentValue ||
                            bot.Responses[yi * bscale, xj * bscale] >= currentValue ||
                            ((i != 0 || j != 0) &&
                             mid.Responses[yi * mscale, xj * mscale] >= currentValue))
                        {
                            currentValue = 0; // suppressed
                            break;
                        }
                    }
                }

                // check if this point is really interesting
                if (currentValue >= threshold)
                {
                    // interpolate to sub-pixel precision
                    double[] offset = interpolate(y, x, top, mid, bot);

                    // accept only if the interpolated extremum lies within
                    // half a step of the sampled location in x, y and scale
                    if (System.Math.Abs(offset[0]) < 0.5 &&
                        System.Math.Abs(offset[1]) < 0.5 &&
                        System.Math.Abs(offset[2]) < 0.5)
                    {
                        featureList.Add(new SpeededUpRobustFeaturePoint(
                            (x + offset[0]) * tstep,
                            (y + offset[1]) * tstep,
                            // 0.133333333 = 2/15: converts filter size to
                            // scale — NOTE(review): presumably the SURF
                            // size-to-sigma ratio (1.2/9); confirm
                            0.133333333 * (mid.Size + offset[2] * mstep),
                            mid.Laplacian[y * mscale, x * mscale]));
                    }
                }
            }
        }
    }

    descriptor = null;

    if (featureType != SpeededUpRobustFeatureDescriptorType.None)
    {
        // compute full descriptors (extended or standard)
        descriptor = new SpeededUpRobustFeaturesDescriptor(integral);
        descriptor.Extended = featureType == SpeededUpRobustFeatureDescriptorType.Extended;
        descriptor.Invariant = computeOrientation;
        descriptor.Compute(featureList);
    }
    else if (computeOrientation)
    {
        // no descriptors requested, but orientations still wanted
        descriptor = new SpeededUpRobustFeaturesDescriptor(integral);
        foreach (var p in featureList)
        {
            p.Orientation = descriptor.GetOrientation(p);
        }
    }

    return (featureList);
}
/// <summary>
///   Construct integral image from source grayscale image.
/// </summary>
///
/// <param name="image">Source unmanaged image.</param>
///
/// <returns>Returns integral image.</returns>
///
/// <exception cref="UnsupportedImageFormatException">The source image has incorrect pixel format.</exception>
///
public static IntegralImage FromBitmap( UnmanagedImage image )
{
    // check image format: only 8 bpp grayscale is accepted.
    // Throw the documented UnsupportedImageFormatException (which derives
    // from ArgumentException) instead of a bare ArgumentException, so the
    // behavior matches the XML contract and the Bitmap overload.
    if ( image.PixelFormat != PixelFormat.Format8bppIndexed )
    {
        throw new UnsupportedImageFormatException( "Source image can be grayscale (8 bpp indexed) image only." );
    }

    // get source image size
    int width  = image.Width;
    int height = image.Height;
    int offset = image.Stride - width; // padding bytes at the end of each row

    // create integral image (internally sized (height+1) x (width+1),
    // with a zero first row/column so sums below need no bounds checks)
    IntegralImage im = new IntegralImage( width, height );
    uint[,] integralImage = im.integralImage;

    // do the job
    unsafe
    {
        byte* src = (byte*) image.ImageData.ToPointer( );

        // for each line
        for ( int y = 1; y <= height; y++ )
        {
            uint rowSum = 0;

            // for each pixel: running row sum plus the cell above
            // yields the standard summed-area table recurrence
            for ( int x = 1; x <= width; x++, src++ )
            {
                rowSum += *src;

                integralImage[y, x] = rowSum + integralImage[y - 1, x];
            }
            src += offset;
        }
    }

    return im;
}