/// <summary>Process the filter on the specified image.</summary>
/// <param name="image">Source image data.</param>
protected override unsafe void ProcessFilter( UnmanagedImage image )
{
    // create integral image for fast local-mean lookups
    IntegralImage im = IntegralImage.FromBitmap( image );

    int width    = image.Width;
    int height   = image.Height;
    int widthM1  = width - 1;
    int heightM1 = height - 1;
    int offset   = image.Stride - width;
    int radius   = windowSize / 2;

    float avgBrightnessPart = 1.0f - pixelBrightnessDifferenceLimit;

    byte* ptr = (byte*) image.ImageData.ToPointer( );

    for ( int y = 0; y < height; y++ )
    {
        // clamp the window's Y range to the image
        int y1 = ( y - radius < 0 ) ? 0 : y - radius;
        int y2 = ( y + radius > heightM1 ) ? heightM1 : y + radius;

        for ( int x = 0; x < width; x++, ptr++ )
        {
            // clamp the window's X range to the image
            int x1 = ( x - radius < 0 ) ? 0 : x - radius;
            int x2 = ( x + radius > widthM1 ) ? widthM1 : x + radius;

            float mean = im.GetRectangleMeanUnsafe( x1, y1, x2, y2 );

            // black only if the local mean is below the upper limit and the
            // pixel is darker than the mean by more than the allowed fraction
            *ptr = (byte) ( ( mean < upperLimit && *ptr < (int) ( mean * avgBrightnessPart ) ) ? 0 : 255 );
        }
        ptr += offset;
    }
}
/// <summary>
/// Process the filter on the specified image.
/// </summary>
///
/// <param name="image">Source image data.</param>
///
protected override unsafe void ProcessFilter( UnmanagedImage image )
{
    // create integral image
    IntegralImage im = IntegralImage.FromBitmap( image );

    int width    = image.Width;
    int height   = image.Height;
    int widthM1  = width - 1;
    int heightM1 = height - 1;
    int offset   = image.Stride - width;
    int radius   = windowSize / 2;

    float avgBrightnessPart = 1.0f - pixelBrightnessDifferenceLimit;

    byte* ptr = (byte*) image.ImageData.ToPointer( );

    for ( int y = 0; y < height; y++ )
    {
        // rectangle's Y coordinates
        int y1 = y - radius;
        int y2 = y + radius;

        if ( y1 < 0 )
        {
            y1 = 0;
        }
        if ( y2 > heightM1 )
        {
            y2 = heightM1;
        }

        for ( int x = 0; x < width; x++, ptr++ )
        {
            // rectangle's X coordinates
            int x1 = x - radius;
            int x2 = x + radius;

            if ( x1 < 0 )
            {
                x1 = 0;
            }
            if ( x2 > widthM1 )
            {
                x2 = widthM1;
            }

            // thresholding version, kept for reference:
            // *ptr = (byte) ( ( *ptr < (int) ( im.GetRectangleMeanUnsafe( x1, y1, x2, y2 ) * avgBrightnessPart ) ) ? 0 : 255 );

            // debug variant: write the local mean itself instead of thresholding
            *ptr = (byte) im.GetRectangleMeanUnsafe( x1, y1, x2, y2 );
        }
        ptr += offset;
    }
}
/// <summary>
/// Process the filter on the specified image.
/// </summary>
///
/// <param name="image">Source image data.</param>
///
protected override unsafe void ProcessFilter( UnmanagedImage image )
{
    // create integral image
    var im = IntegralImage.FromBitmap( image );

    var width    = image.Width;
    var height   = image.Height;
    var widthM1  = width - 1;
    var heightM1 = height - 1;
    var offset   = image.Stride - width;
    var radius   = this.windowSize / 2;

    var avgBrightnessPart = 1.0f - this.pixelBrightnessDifferenceLimit;

    var ptr = (byte*) image.ImageData.ToPointer( );

    for ( var y = 0; y < height; y++ )
    {
        // rectangle's Y coordinates
        var y1 = y - radius;
        var y2 = y + radius;

        if ( y1 < 0 )
        {
            y1 = 0;
        }
        if ( y2 > heightM1 )
        {
            y2 = heightM1;
        }

        for ( var x = 0; x < width; x++, ptr++ )
        {
            // rectangle's X coordinates
            var x1 = x - radius;
            var x2 = x + radius;

            if ( x1 < 0 )
            {
                x1 = 0;
            }
            if ( x2 > widthM1 )
            {
                x2 = widthM1;
            }

            // black if the pixel is darker than the local mean by more than the allowed fraction
            *ptr = (byte) ( ( *ptr < (int) ( im.GetRectangleMeanUnsafe( x1, y1, x2, y2 ) * avgBrightnessPart ) ) ? 0 : 255 );
        }
        ptr += offset;
    }
}
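The ProcessFilter variants above implement Bradley-style adaptive thresholding; the windowSize and pixelBrightnessDifferenceLimit fields match AForge.NET's BradleyLocalThresholding filter. A minimal usage sketch, assuming that filter class and its standard WindowSize / PixelBrightnessDifferenceLimit properties (the input and output file names are placeholders):

using System.Drawing;
using AForge.Imaging.Filters;

class BradleyDemo
{
    static void Main( )
    {
        // load and convert to 8 bpp grayscale, since ProcessFilter assumes a single-channel image
        Bitmap source = (Bitmap) Image.FromFile( "page.png" );
        Bitmap gray = Grayscale.CommonAlgorithms.BT709.Apply( source );

        // window size and brightness-difference limit map onto the
        // windowSize and pixelBrightnessDifferenceLimit fields used above
        BradleyLocalThresholding filter = new BradleyLocalThresholding( );
        filter.WindowSize = 41;
        filter.PixelBrightnessDifferenceLimit = 0.15f;

        // binarize in place and save the result
        filter.ApplyInPlace( gray );
        gray.Save( "binarized.png" );
    }
}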
public IntegralImageTest( )
{
    // build a 10x10 grayscale checkerboard of alternating 0 and 1 values
    UnmanagedImage uImage = UnmanagedImage.Create( 10, 10, PixelFormat.Format8bppIndexed );

    for ( int y = 0; y < 10; y++ )
    {
        for ( int x = 0; x < 10; x++ )
        {
            uImage.SetPixel( x, y, ( ( x + y ) % 2 == 0 ) ? Color.FromArgb( 0, 0, 0 ) : Color.FromArgb( 1, 1, 1 ) );
        }
    }

    integralImage = IntegralImage.FromBitmap( uImage );
}
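For reference, a hedged sketch of the kind of assertion this fixture enables, assuming an xUnit-style test and AForge's IntegralImage.GetRectangleMean; the expected value follows from the checkerboard above (half of the 100 pixels are 0, half are 1):

[Fact]
public void GetRectangleMean_FullCheckerboard_ReturnsOneHalf( )
{
    // the 10x10 inclusive rectangle covers all 100 pixels: 50 zeros + 50 ones
    float mean = integralImage.GetRectangleMean( 0, 0, 9, 9 );
    Assert.Equal( 0.5, mean, 3 );
}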
public List<ScaleInvariantFeatureTranformKeypoint> ProcessImage( UnmanagedImage image )
{
    // check image format
    if ( ( image.PixelFormat != PixelFormat.Format8bppIndexed ) &&
         ( image.PixelFormat != PixelFormat.Format24bppRgb ) &&
         ( image.PixelFormat != PixelFormat.Format32bppRgb ) &&
         ( image.PixelFormat != PixelFormat.Format32bppArgb ) )
    {
        throw new UnsupportedImageFormatException( "Unsupported pixel format of the source image." );
    }

    // make sure we have grayscale image
    if ( image.PixelFormat == PixelFormat.Format8bppIndexed )
    {
        _grayImage = image;
    }
    else
    {
        // create temporary grayscale image
        _grayImage = Grayscale.CommonAlgorithms.BT709.Apply( image );
    }

    // 1. Extract corner points from the image
    List<IntPoint> corners = Detector.ProcessImage( _grayImage );

    List<ScaleInvariantFeatureTranformKeypoint> features = new List<ScaleInvariantFeatureTranformKeypoint>( );
    for ( int i = 0; i < corners.Count; i++ )
    {
        features.Add( new ScaleInvariantFeatureTranformKeypoint( corners[i].X, corners[i].Y ) );
    }

    // 2. Compute the integral for the given image
    _integral = IntegralImage.FromBitmap( _grayImage );

    // 3. Compute feature descriptors if required
    _descriptor = null;
    if ( _featureType != ScaleInvariantFeatureTranformKeypointDescriptorType.None )
    {
        _descriptor = GetDescriptor( );
        _descriptor.Compute( features );
    }

    return features;
}
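A minimal calling sketch for the method above. The detector host type below is hypothetical (only ProcessImage and ScaleInvariantFeatureTranformKeypoint appear in the snippet), and the keypoint is assumed to expose the X/Y values it was constructed with:

using System;
using System.Collections.Generic;
using System.Drawing;
using AForge.Imaging;

class KeypointDemo
{
    static void Main( )
    {
        Bitmap bitmap = (Bitmap) Image.FromFile( "input.png" );
        UnmanagedImage image = UnmanagedImage.FromManagedImage( bitmap );

        // hypothetical host class wrapping the ProcessImage method above
        var detector = new ScaleInvariantFeatureTranformDetector( );
        List<ScaleInvariantFeatureTranformKeypoint> keypoints = detector.ProcessImage( image );

        foreach ( var kp in keypoints )
        {
            Console.WriteLine( "keypoint at ({0}, {1})", kp.X, kp.Y );
        }
    }
}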
protected override unsafe void ProcessFilter( UnmanagedImage image )
{
    // integral image for constant-time local means
    IntegralImage image2 = IntegralImage.FromBitmap( image );

    int width  = image.Width;
    int height = image.Height;
    int num3 = width - 1;             // last column index
    int num4 = height - 1;            // last row index
    int num5 = image.Stride - width;  // padding bytes per row
    int num6 = this.windowSize / 2;   // window radius

    float num7 = 1f - this.pixelBrightnessDifferenceLimit;

    byte* numPtr = (byte*) image.ImageData.ToPointer( );

    for ( int i = 0; i < height; i++ )
    {
        // clamp the window's rows
        int num9  = i - num6;
        int num10 = i + num6;
        if ( num9 < 0 )
        {
            num9 = 0;
        }
        if ( num10 > num4 )
        {
            num10 = num4;
        }

        int num11 = 0;
        while ( num11 < width )
        {
            // clamp the window's columns
            int num12 = num11 - num6;
            int num13 = num11 + num6;
            if ( num12 < 0 )
            {
                num12 = 0;
            }
            if ( num13 > num3 )
            {
                num13 = num3;
            }

            // black if darker than the scaled local mean, otherwise white
            numPtr[0] = ( numPtr[0] < ( (int) ( image2.GetRectangleMeanUnsafe( num12, num9, num13, num10 ) * num7 ) ) )
                ? ( (byte) 0 )
                : ( (byte) 0xff );

            num11++;
            numPtr++;
        }
        numPtr += num5;
    }
}
protected unsafe override void ProcessFilter( UnmanagedImage image )
{
    // integral image for constant-time local means
    IntegralImage integralImage = IntegralImage.FromBitmap( image );

    int width  = image.Width;
    int height = image.Height;
    int num  = width - 1;             // last column index
    int num2 = height - 1;            // last row index
    int num3 = image.Stride - width;  // padding bytes per row
    int num4 = windowSize / 2;        // window radius

    float num5 = 1f - pixelBrightnessDifferenceLimit;

    byte* ptr = (byte*) image.ImageData.ToPointer( );

    for ( int i = 0; i < height; i++ )
    {
        // clamp the window's rows
        int num6 = i - num4;
        int num7 = i + num4;
        if ( num6 < 0 )
        {
            num6 = 0;
        }
        if ( num7 > num2 )
        {
            num7 = num2;
        }

        int num8 = 0;
        while ( num8 < width )
        {
            // clamp the window's columns
            int num9  = num8 - num4;
            int num10 = num8 + num4;
            if ( num9 < 0 )
            {
                num9 = 0;
            }
            if ( num10 > num )
            {
                num10 = num;
            }

            // white if at least as bright as the scaled local mean, otherwise black
            *ptr = (byte) ( ( *ptr >= (int) ( integralImage.GetRectangleMeanUnsafe( num9, num6, num10, num7 ) * num5 ) ) ? 255 : 0 );

            num8++;
            ptr++;
        }
        ptr += num3;
    }
}
/// <summary>
/// Process the filter on the specified image.
/// </summary>
/// <param name="image">Source image data.</param>
protected override unsafe void ProcessFilter( UnmanagedImage image )
{
    // integral image for constant-time local means
    IntegralImage integralImage = IntegralImage.FromBitmap( image );

    int width  = image.Width;
    int height = image.Height;
    int num1 = width - 1;             // last column index
    int num2 = height - 1;            // last row index
    int num3 = image.Stride - width;  // padding bytes per row
    int num4 = _windowSize / 2;       // window radius

    float num5 = 1f - _pixelBrightnessDifferenceLimit;

    byte* numPtr = (byte*) image.ImageData.ToPointer( );

    for ( int index = 0; index < height; ++index )
    {
        // clamp the window's rows
        int y1 = index - num4;
        int y2 = index + num4;
        if ( y1 < 0 )
        {
            y1 = 0;
        }
        if ( y2 > num2 )
        {
            y2 = num2;
        }

        int num6 = 0;
        while ( num6 < width )
        {
            // clamp the window's columns
            int x1 = num6 - num4;
            int x2 = num6 + num4;
            if ( x1 < 0 )
            {
                x1 = 0;
            }
            if ( x2 > num1 )
            {
                x2 = num1;
            }

            // black if darker than the scaled local mean, otherwise white
            *numPtr = (int) *numPtr < (int) ( (double) integralImage.GetRectangleMeanUnsafe( x1, y1, x2, y2 ) * (double) num5 )
                ? (byte) 0
                : byte.MaxValue;

            ++num6;
            ++numPtr;
        }
        numPtr += num3;
    }
}
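All of the ProcessFilter variants above apply the same per-pixel rule, so the decompiled num* names can be read against this standalone predicate (the names here are illustrative only):

// true when the pixel should become black under the Bradley rule:
// it is darker than the local window mean by more than the given fraction
static bool IsBelowLocalThreshold( byte pixel, float localMean, float differenceLimit )
{
    float avgBrightnessPart = 1.0f - differenceLimit; // e.g. 0.85 for a 0.15 limit
    return pixel < (int) ( localMean * avgBrightnessPart );
}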
public List<SpeededUpRobustFeaturePoint> ProcessImage( UnmanagedImage image )
{
    // check image format
    if ( ( image.PixelFormat != PixelFormat.Format8bppIndexed ) &&
         ( image.PixelFormat != PixelFormat.Format24bppRgb ) &&
         ( image.PixelFormat != PixelFormat.Format32bppRgb ) &&
         ( image.PixelFormat != PixelFormat.Format32bppArgb ) )
    {
        throw new UnsupportedImageFormatException( "Unsupported pixel format of the source image." );
    }

    // make sure we have grayscale image
    UnmanagedImage grayImage = null;

    if ( image.PixelFormat == PixelFormat.Format8bppIndexed )
    {
        grayImage = image;
    }
    else
    {
        // create temporary grayscale image
        grayImage = Grayscale.CommonAlgorithms.BT709.Apply( image );
    }

    // 1. Compute the integral for the given image
    _integral = IntegralImage.FromBitmap( grayImage );

    // 2. Create and compute interest point response map
    if ( _responses == null )
    {
        // re-create only if really needed
        _responses = new ResponseLayerCollection( image.Width, image.Height, _octaves, _initial );
    }
    else
    {
        _responses.Update( image.Width, image.Height, _initial );
    }

    // compute the response map
    _responses.Compute( _integral );

    // 3. Suppress non-maximum points
    List<SpeededUpRobustFeaturePoint> featureList = new List<SpeededUpRobustFeaturePoint>( );

    // for each image pyramid in the response map
    foreach ( ResponseLayer[] layers in _responses )
    {
        // grab the three layers forming the pyramid
        ResponseLayer bot = layers[0]; // bottom layer
        ResponseLayer mid = layers[1]; // middle layer
        ResponseLayer top = layers[2]; // top layer

        int border = ( top.Size + 1 ) / ( 2 * top.Step );
        int tstep = top.Step;
        int mstep = mid.Size - bot.Size;

        int mscale = mid.Width / top.Width;
        int bscale = bot.Width / top.Width;

        int r = 1;

        // for each row
        for ( int y = border + 1; y < top.Height - border; y++ )
        {
            // for each pixel
            for ( int x = border + 1; x < top.Width - border; x++ )
            {
                double currentValue = mid.Responses[y * mscale, x * mscale];

                // for each window's row
                for ( int i = -r; ( currentValue >= _threshold ) && ( i <= r ); i++ )
                {
                    // for each window's pixel
                    for ( int j = -r; j <= r; j++ )
                    {
                        int yi = y + i;
                        int xj = x + j;

                        // for each response layer
                        if ( top.Responses[yi, xj] >= currentValue ||
                             bot.Responses[yi * bscale, xj * bscale] >= currentValue ||
                             ( ( i != 0 || j != 0 ) && mid.Responses[yi * mscale, xj * mscale] >= currentValue ) )
                        {
                            currentValue = 0;
                            break;
                        }
                    }
                }

                // check if this point is really interesting
                if ( currentValue >= _threshold )
                {
                    // interpolate to sub-pixel precision
                    double[] offset = interpolate( y, x, top, mid, bot );

                    if ( System.Math.Abs( offset[0] ) < 0.5 &&
                         System.Math.Abs( offset[1] ) < 0.5 &&
                         System.Math.Abs( offset[2] ) < 0.5 )
                    {
                        featureList.Add( new SpeededUpRobustFeaturePoint(
                            ( x + offset[0] ) * tstep,
                            ( y + offset[1] ) * tstep,
                            0.133333333 * ( mid.Size + offset[2] * mstep ),
                            mid.Laplacian[y * mscale, x * mscale] ) );
                    }
                }
            }
        }
    }

    _descriptor = null;

    if ( _featureType != SpeededUpRobustFeatureDescriptorType.None )
    {
        _descriptor = new SpeededUpRobustFeaturesDescriptor( _integral );
        _descriptor.Extended = _featureType == SpeededUpRobustFeatureDescriptorType.Extended;
        _descriptor.Invariant = _computeOrientation;
        _descriptor.Compute( featureList );
    }
    else if ( _computeOrientation )
    {
        _descriptor = new SpeededUpRobustFeaturesDescriptor( _integral );
        foreach ( var p in featureList )
        {
            p.Orientation = _descriptor.GetOrientation( p );
        }
    }

    return featureList;
}
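This method closely mirrors Accord.NET's SURF detector. A hedged usage sketch, assuming the public host type is Accord.Imaging.SpeededUpRobustFeaturesDetector with a threshold constructor argument and a ProcessImage(Bitmap) overload; the file name and threshold value are placeholders:

using System;
using System.Collections.Generic;
using System.Drawing;
using Accord.Imaging;

class SurfDemo
{
    static void Main( )
    {
        Bitmap image = (Bitmap) Image.FromFile( "scene.png" );

        // the threshold feeds the non-maximum suppression step above
        var surf = new SpeededUpRobustFeaturesDetector( 0.0002f );
        List<SpeededUpRobustFeaturePoint> points = surf.ProcessImage( image );

        foreach ( var p in points )
        {
            Console.WriteLine( "({0:F1}, {1:F1}) scale {2:F2}", p.X, p.Y, p.Scale );
        }
    }
}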