// Locates the game board in a screenshot, slices it into 56x56 tiles, and returns a
// replacement board image. Side effects: fills the local 'tiles' dictionary (currently
// discarded when the method returns — NOTE(review): confirm whether 'tiles' was meant
// to be stored in a field or returned).
// Throws GameNotFoundException when the detected board is not aligned to the tile grid.
private Bitmap processImage(Bitmap img) {
    // Generate closing structural element: a disc of radius ~8 inside a 13x13 kernel
    // (1 = inside the disc, -1 = ignored by the morphological Closing filter).
    short[,] structEl = new short[13, 13];
    for (int i = 0; i < 13; i++)
        for (int j = 0; j < 13; j++)
            if ((i - 6) * (i - 6) + (j - 6) * (j - 6) < 64)
                structEl[i, j] = 1;
            else
                structEl[i, j] = -1;

    // Initialize filters
    HSLFiltering borderFind = new HSLFiltering();
    Closing borderClose = new Closing(structEl);
    Invert invert = new Invert();
    // Grayscale weights (0, 0, 1.0): keep only the blue channel.
    Grayscale grayFilter = new Grayscale(0, 0, 1.0);
    Threshold bwFilter = new Threshold(1);
    PointedColorFloodFill blackout = new PointedColorFloodFill();
    blackout.Tolerance = Color.FromArgb(0, 0, 0);
    blackout.FillColor = Color.FromArgb(0, 0, 0);
    ExtractBiggestBlob getgame = new ExtractBiggestBlob();
    getgame.OriginalImage = new Bitmap(img);
    GrayscaleToRGB colorFilter = new GrayscaleToRGB();

    // Color determined with ColorProbe.
    // Keep only pixels whose HSL values match the board border colour.
    borderFind.Hue = new IntRange(190, 200);
    borderFind.Saturation = new Range(0.6f, 0.8f);
    borderFind.Luminance = new Range(0.6f, 1.0f);
    borderFind.ApplyInPlace(img);

    // Close gaps in the border, binarize, invert, flood-fill the outside black,
    // then extract the biggest blob (assumed to be the game board).
    borderClose.ApplyInPlace(img);
    img = grayFilter.Apply(img);
    bwFilter.ApplyInPlace(img);
    invert.ApplyInPlace(img);
    img = colorFilter.Apply(img);
    blackout.StartingPoint = new AForge.IntPoint(0, 0);
    blackout.ApplyInPlace(img);
    img = getgame.Apply(img);

    // Board is a grid of 56x56-pixel tiles.
    int tilesx = img.Width / 56;
    int tilesy = img.Height / 56;
    // NOTE(review): tilesx == img.Width / 56 (integer division), so
    // (tilesx - img.Width / 56.0) is in (-1, 0] and the (int) cast truncates it to 0,
    // making offsetx/offsety always 0. Presumably a centering offset was intended —
    // confirm against the original design before relying on these values.
    int offsetx = 56 * (int)(tilesx - img.Width / 56.0);
    int offsety = 56 * (int)(tilesy - img.Height / 56.0);
    // Reject blobs whose size strays more than 11px from a whole number of tiles.
    if ((Math.Abs(offsetx) > 11) || (Math.Abs(offsety) > 11))
        throw new GameNotFoundException();

    List<IntPoint> corners = new List<IntPoint>();
    Dictionary<IntPoint, Bitmap> tiles = new Dictionary<IntPoint, Bitmap>();
    SimpleQuadrilateralTransformation tileXtract = new SimpleQuadrilateralTransformation();
    // Cut each 56x56 tile out of the board; tile key is its (column, row) grid position.
    for (int j = 0; j < tilesy; j++)
        for (int i = 0; i < tilesx; i++) {
            // Corner order: top-left, bottom-left, bottom-right, top-right.
            corners.Add(new IntPoint(offsetx + i * 56, offsety + j * 56 ));
            corners.Add(new IntPoint(offsetx + i * 56, offsety + (j + 1) * 56 - 1));
            corners.Add(new IntPoint(offsetx + (i + 1) * 56 - 1, offsety + (j + 1) * 56 - 1));
            corners.Add(new IntPoint(offsetx + (i + 1) * 56 - 1, offsety + j * 56 ));
            tileXtract.SourceQuadrilateral = corners;
            tiles.Add(new IntPoint(i, j), tileXtract.Apply(img));
            corners.Clear();
        }
    // Replace the working image with the "cb" resource bitmap before returning.
    img = (Bitmap)Properties.Resources.ResourceManager.GetObject("cb");
    /*Graphics g = Graphics.FromImage(img); Pen bluePen = new Pen(Color.Blue, 2); for (int i = 0, n = blobs.Length; i < n; i++) { List<IntPoint> edgePoints = blobCounter.GetBlobsEdgePoints(blobs[i]); if (edgePoints.Count > 1) { List<IntPoint> corners = PointsCloud.FindQuadrilateralCorners(edgePoints); g.DrawPolygon(bluePen, ToPointsArray(corners)); } } bluePen.Dispose(); g.Dispose(); */
    return img;
}
// ==========================================================================================================
// Functions compatible with lists:
// ==========================================================================================================
// Note, that each function needs to keep the image in RGB, otherwise drawing fill fail
// =========================================================
// Applies one of several noise-reduction filters, selected by par_int
// (1 = bilateral, 2 = median, 3 = mean, anything else = median).
// The frame is converted to grayscale for filtering and back to RGB afterwards.
// The extra parameters are part of the common list-function signature and are unused here.
private void NoiseReduction_Funct(ref Bitmap frame, int par_int, double par_d, int par_R, int par_G, int par_B)
{
    // Smoothing filters operate on the gray image.
    frame = Grayscale.CommonAlgorithms.RMY.Apply(frame);

    if (par_int == 1)
    {
        BilateralSmoothing bilateral = new BilateralSmoothing();
        bilateral.KernelSize = 7;
        bilateral.SpatialFactor = 10;
        bilateral.ColorFactor = 30;
        bilateral.ColorPower = 0.5;
        bilateral.ApplyInPlace(frame);
    }
    else if (par_int == 2)
    {
        new Median().ApplyInPlace(frame);
    }
    else if (par_int == 3)
    {
        new Mean().ApplyInPlace(frame);
    }
    else
    {
        // Unknown selector: fall back to a median filter.
        new Median().ApplyInPlace(frame);
    }

    // Back to RGB so subsequent drawing/filters keep working.
    frame = new GrayscaleToRGB().Apply(frame);
}
/// <summary>
/// Convert grayscale to RGB colour space.
/// Non-grayscale bitmaps are returned unchanged; grayscale bitmaps are
/// converted and the original is disposed.
/// </summary>
/// <param name="bitmap">The bitmap.</param>
public static Bitmap Channel(this Bitmap bitmap)
{
    var stats = new ImageStatistics(bitmap);
    if (stats.IsGrayscale)
    {
        var converted = new GrayscaleToRGB().Apply(bitmap);
        bitmap.Dispose();
        return converted;
    }

    return bitmap;
}
/// <summary>
/// Convert grayscale to RGB colour space.
/// Grayscale input is converted (and disposed); anything else is returned as-is.
/// </summary>
/// <param name="Bitmap">The bitmap.</param>
public static Bitmap Channel(this Bitmap Bitmap)
{
    // Already RGB? Nothing to do.
    if (!new ImageStatistics(Bitmap).IsGrayscale)
    {
        return Bitmap;
    }

    GrayscaleToRGB converter = new GrayscaleToRGB();
    Bitmap rgb = converter.Apply(Bitmap);

    // The caller receives the converted copy; release the grayscale original.
    Bitmap.Dispose();
    return rgb;
}
// Blends the outputs of filter1 and filter2 (or the source image, when filter2 is null)
// per pixel, weighted by the 'texture' intensity map; optionally mixes back a share of
// the second image via preserveLevel. Both inputs are forced to 24bpp RGB.
// Throws InvalidImagePropertiesException when the texture size does not match the image,
// ApplicationException when a sub-filter changes the image dimensions.
protected unsafe override void ProcessFilter(UnmanagedImage sourceData, UnmanagedImage destinationData)
{
    int width = sourceData.Width;
    int height = sourceData.Height;
    // Generate the texture on demand, otherwise validate the preset one.
    if (textureGenerator != null)
    {
        texture = textureGenerator.Generate(width, height);
    }
    else if (texture.GetLength(0) != height || texture.GetLength(1) != width)
    {
        throw new InvalidImagePropertiesException("Texture size does not match image size.");
    }
    // First image: filter1 applied to the source.
    UnmanagedImage unmanagedImage = filter1.Apply(sourceData);
    if (width != unmanagedImage.Width || height != unmanagedImage.Height)
    {
        unmanagedImage.Dispose();
        throw new ApplicationException("Filters should not change image dimension.");
    }
    // Promote 8bpp output to RGB so the blending loop can assume 3 bytes/pixel.
    if (unmanagedImage.PixelFormat == PixelFormat.Format8bppIndexed)
    {
        GrayscaleToRGB grayscaleToRGB = new GrayscaleToRGB();
        UnmanagedImage unmanagedImage2 = grayscaleToRGB.Apply(unmanagedImage);
        unmanagedImage.Dispose();
        unmanagedImage = unmanagedImage2;
    }
    // Second image: filter2 applied to the source, or the source itself when filter2 is null.
    UnmanagedImage unmanagedImage3 = null;
    if (filter2 != null)
    {
        unmanagedImage3 = filter2.Apply(sourceData);
        if (width != unmanagedImage3.Width || height != unmanagedImage3.Height)
        {
            unmanagedImage.Dispose();
            unmanagedImage3.Dispose();
            throw new ApplicationException("Filters should not change image dimension.");
        }
        if (unmanagedImage3.PixelFormat == PixelFormat.Format8bppIndexed)
        {
            GrayscaleToRGB grayscaleToRGB2 = new GrayscaleToRGB();
            UnmanagedImage unmanagedImage4 = grayscaleToRGB2.Apply(unmanagedImage3);
            unmanagedImage3.Dispose();
            unmanagedImage3 = unmanagedImage4;
        }
    }
    if (unmanagedImage3 == null)
    {
        unmanagedImage3 = sourceData;
    }
    // ptr = destination, ptr2 = first image, ptr3 = second image.
    byte *ptr = (byte *)destinationData.ImageData.ToPointer();
    byte *ptr2 = (byte *)unmanagedImage.ImageData.ToPointer();
    byte *ptr3 = (byte *)unmanagedImage3.ImageData.ToPointer();
    // Per-row padding (stride minus 3 payload bytes per pixel) for each image.
    int num = destinationData.Stride - 3 * width;
    int num2 = unmanagedImage.Stride - 3 * width;
    int num3 = unmanagedImage3.Stride - 3 * width;
    if (preserveLevel != 0.0)
    {
        // dst = filterLevel * (t*img1 + (1-t)*img2) + preserveLevel * img2, clamped to 255.
        for (int i = 0; i < height; i++)
        {
            for (int j = 0; j < width; j++)
            {
                double num4 = texture[i, j];
                double num5 = 1.0 - num4;
                int num6 = 0;
                // Blend all three colour components of the pixel.
                while (num6 < 3)
                {
                    *ptr = (byte)System.Math.Min(255.0, filterLevel * (num4 * (double)(int)(*ptr2) + num5 * (double)(int)(*ptr3)) + preserveLevel * (double)(int)(*ptr3));
                    num6++;
                    ptr2++;
                    ptr3++;
                    ptr++;
                }
            }
            ptr2 += num2;
            ptr3 += num3;
            ptr += num;
        }
    }
    else
    {
        // Pure cross-fade: dst = t*img1 + (1-t)*img2, clamped to 255.
        for (int k = 0; k < height; k++)
        {
            for (int l = 0; l < width; l++)
            {
                double num7 = texture[k, l];
                double num8 = 1.0 - num7;
                int num9 = 0;
                while (num9 < 3)
                {
                    *ptr = (byte)System.Math.Min(255.0, num7 * (double)(int)(*ptr2) + num8 * (double)(int)(*ptr3));
                    num9++;
                    ptr2++;
                    ptr3++;
                    ptr++;
                }
            }
            ptr2 += num2;
            ptr3 += num3;
            ptr += num;
        }
    }
    // Release temporaries; the source image is never disposed here.
    unmanagedImage.Dispose();
    if (unmanagedImage3 != sourceData)
    {
        unmanagedImage3.Dispose();
    }
}
// Converts a Kinect ColorImageFrame carrying 16bpp grayscale pixel data into a
// 24bpp RGB Bitmap. Returns null when the frame is null or conversion fails
// (the exception is logged, matching the original best-effort contract).
static Bitmap GrayScaleImageToBitmap(ColorImageFrame image)
{
    try
    {
        if (image != null)
        {
            var pixeldata = new byte[image.PixelDataLength];
            image.CopyPixelDataTo(pixeldata);

            // Fix: the 16bpp staging bitmap and the intermediate 8bpp image were
            // previously leaked on every frame; dispose both deterministically.
            using (var bitmapFrame = new Bitmap(image.Width, image.Height,
                PixelFormat.Format16bppGrayScale))
            {
                BitmapData bmapdata = bitmapFrame.LockBits(
                    new Rectangle(0, 0, image.Width, image.Height),
                    ImageLockMode.WriteOnly,
                    bitmapFrame.PixelFormat);
                try
                {
                    // Copy the raw frame bytes straight into the bitmap's buffer.
                    Marshal.Copy(pixeldata, 0, bmapdata.Scan0, image.PixelDataLength);
                }
                finally
                {
                    bitmapFrame.UnlockBits(bmapdata);
                }

                using (var gray8 = AForge.Imaging.Image.Convert16bppTo8bpp(bitmapFrame))
                {
                    // GrayscaleToRGB requires 8bpp input; its output is a new bitmap
                    // owned by the caller.
                    var filter = new GrayscaleToRGB();
                    return filter.Apply(gray8);
                }
            }
        }
    }
    catch (Exception ex)
    {
        Logger.LogExceptionToFile(ex, "KinectStream");
    }
    return null;
}
// =========================================================
// Runs an edge detector selected by par_int (1 = Sobel, 2 = difference,
// 3 = homogeneity, 4 = Canny, anything else = homogeneity) on a grayscale
// copy of the frame, then converts the result back to RGB.
// The remaining parameters belong to the shared list-function signature and are unused.
private void Edge_detectFunc(ref Bitmap frame, int par_int, double par_d, int par_R, int par_G, int par_B)
{
    // Edge detectors operate on gray images.
    frame = Grayscale.CommonAlgorithms.RMY.Apply(frame);

    if (par_int == 1)
    {
        new SobelEdgeDetector().ApplyInPlace(frame);
    }
    else if (par_int == 2)
    {
        new DifferenceEdgeDetector().ApplyInPlace(frame);
    }
    else if (par_int == 4)
    {
        new CannyEdgeDetector().ApplyInPlace(frame);
    }
    else
    {
        // Both the explicit selector 3 and any unknown value use homogeneity.
        new HomogenityEdgeDetector().ApplyInPlace(frame);
    }

    // Back to colour format for downstream drawing.
    frame = new GrayscaleToRGB().Apply(frame);
}
/// <summary>
/// Process the filter on the specified image.
/// Blends the result of <c>filter1</c> with the result of <c>filter2</c> (or the
/// source image when <c>filter2</c> is not set), weighted per pixel by the texture
/// intensity; <c>preserveLevel</c> optionally mixes back a share of the second image.
/// </summary>
///
/// <param name="sourceData">Source image data.</param>
/// <param name="destinationData">Destination image data.</param>
///
/// <exception cref="InvalidImagePropertiesException">Texture size does not match image size.</exception>
/// <exception cref="ApplicationException">Filters should not change image dimension.</exception>
///
protected override unsafe void ProcessFilter( UnmanagedImage sourceData, UnmanagedImage destinationData )
{
    // get source image dimension
    int width  = sourceData.Width;
    int height = sourceData.Height;

    // if generator was specified, then generate a texture
    // otherwise use provided texture
    if ( textureGenerator != null )
    {
        texture = textureGenerator.Generate( width, height );
    }
    else
    {
        // check existing texture
        if ( ( texture.GetLength( 0 ) != height ) || ( texture.GetLength( 1 ) != width ) )
        {
            // sorry, but source image must have the same dimension as texture
            throw new InvalidImagePropertiesException( "Texture size does not match image size." );
        }
    }

    // apply first filter
    UnmanagedImage filteredImage1 = filter1.Apply( sourceData );

    // check size of the result image
    if ( ( width != filteredImage1.Width ) || ( height != filteredImage1.Height ) )
    {
        filteredImage1.Dispose( );
        throw new ApplicationException( "Filters should not change image dimension." );
    }

    // convert 1st image to RGB if required
    // (the blending loop below assumes 3 bytes per pixel)
    if ( filteredImage1.PixelFormat == PixelFormat.Format8bppIndexed )
    {
        GrayscaleToRGB coloringFilter = new GrayscaleToRGB( );
        UnmanagedImage temp = coloringFilter.Apply( filteredImage1 );
        filteredImage1.Dispose( );
        filteredImage1 = temp;
    }

    UnmanagedImage filteredImage2 = null;
    // apply second filter, if it was specified
    if ( filter2 != null )
    {
        filteredImage2 = filter2.Apply( sourceData );
        // check size of the result image
        if ( ( width != filteredImage2.Width ) || ( height != filteredImage2.Height ) )
        {
            filteredImage1.Dispose( );
            filteredImage2.Dispose( );
            // we are not handling such situations yet
            throw new ApplicationException( "Filters should not change image dimension." );
        }

        // convert 2nd image to RGB if required
        if ( filteredImage2.PixelFormat == PixelFormat.Format8bppIndexed )
        {
            GrayscaleToRGB coloringFilter = new GrayscaleToRGB( );
            UnmanagedImage temp = coloringFilter.Apply( filteredImage2 );
            filteredImage2.Dispose( );
            filteredImage2 = temp;
        }
    }

    // use source image as a second image, if second filter is not set
    if ( filteredImage2 == null )
    {
        filteredImage2 = sourceData;
    }

    // do the job
    unsafe
    {
        byte* dst  = (byte*) destinationData.ImageData.ToPointer( );
        byte* src1 = (byte*) filteredImage1.ImageData.ToPointer( );
        byte* src2 = (byte*) filteredImage2.ImageData.ToPointer( );

        // per-row padding (stride minus payload) for each of the three images
        int dstOffset  = destinationData.Stride - 3 * width;
        int src1Offset = filteredImage1.Stride - 3 * width;
        int src2Offset = filteredImage2.Stride - 3 * width;

        if ( preserveLevel != 0.0 )
        {
            // dst = filterLevel * ( t * src1 + (1-t) * src2 ) + preserveLevel * src2
            // for each line
            for ( int y = 0; y < height; y++ )
            {
                // for each pixel
                for ( int x = 0; x < width; x++ )
                {
                    double t1 = texture[y, x];
                    double t2 = 1 - t1;

                    for ( int i = 0; i < 3; i++, src1++, src2++, dst++ )
                    {
                        *dst = (byte) Math.Min( 255.0f, filterLevel * ( t1 * ( *src1 ) + t2 * ( *src2 ) ) + preserveLevel * ( *src2 ) );
                    }
                }
                src1 += src1Offset;
                src2 += src2Offset;
                dst  += dstOffset;
            }
        }
        else
        {
            // plain cross-fade: dst = t * src1 + (1-t) * src2
            // for each line
            for ( int y = 0; y < height; y++ )
            {
                // for each pixel
                for ( int x = 0; x < width; x++ )
                {
                    double t1 = texture[y, x];
                    double t2 = 1 - t1;

                    for ( int i = 0; i < 3; i++, src1++, src2++, dst++ )
                    {
                        *dst = (byte) Math.Min( 255.0f, t1 * *src1 + t2 * *src2 );
                    }
                }
                src1 += src1Offset;
                src2 += src2Offset;
                dst  += dstOffset;
            }
        }
    }

    // dispose temp images; the source image itself is never disposed here
    filteredImage1.Dispose( );
    if ( filteredImage2 != sourceData )
    {
        filteredImage2.Dispose( );
    }
}
// Apply filter using texture
// The higher intensity in texture - the more filter1 is used
//
// Returns a new bitmap blending filter1's output (dst) with filter2's output or the
// source image (src), weighted per pixel by the texture value.
// Throws ArgumentException when the texture size does not match the image,
// ApplicationException when a sub-filter changes the image dimensions.
public Bitmap Apply(Bitmap srcImg)
{
    int width = srcImg.Width;
    int height = srcImg.Height;

    if (textureGenerator != null)
    {
        // create new texture, if generator was provided
        texture = textureGenerator.Generate(width, height);
    }
    else
    {
        // check existing texture
        if ((texture.GetLength(0) != height) || (texture.GetLength(1) != width))
        {
            // sorry, but source image must have the same dimension as texture
            throw new ArgumentException("Texture size does not match image size");
        }
    }

    // dstImg holds filter1's result; it is also the returned image.
    Bitmap dstImg = filter1.Apply(srcImg);
    bool disposeSrc = false;

    // check destination size
    if ((width != dstImg.Width) || (height != dstImg.Height))
    {
        dstImg.Dispose( );
        // we are not handling such situations yet
        throw new ApplicationException( );
    }

    // apply filter2 also, if it is
    // (srcImg is rebound to filter2's output; the original is untouched)
    if (filter2 != null)
    {
        srcImg = filter2.Apply(srcImg);
        disposeSrc = true;

        // check source size
        if ((width != srcImg.Width) || (height != srcImg.Height))
        {
            srcImg.Dispose( );
            dstImg.Dispose( );
            // we are not handling such situations yet
            throw new ApplicationException( );
        }
    }

    // check pixel formats — promote whichever image is 8bpp to RGB so both match
    if (dstImg.PixelFormat != srcImg.PixelFormat)
    {
        IFilter f = new GrayscaleToRGB( );

        // convert temp image to RGB format
        if (dstImg.PixelFormat == PixelFormat.Format8bppIndexed)
        {
            Bitmap t = f.Apply(dstImg);
            dstImg.Dispose( );
            dstImg = t;
        }
        // convert source image to RGB format
        if (srcImg.PixelFormat == PixelFormat.Format8bppIndexed)
        {
            Bitmap t = f.Apply(srcImg);
            if (disposeSrc)
            {
                srcImg.Dispose( );
            }
            srcImg = t;
            disposeSrc = true;
        }
    }

    // lock source bitmap data
    BitmapData srcData = srcImg.LockBits(
        new Rectangle(0, 0, width, height),
        ImageLockMode.ReadOnly, srcImg.PixelFormat);
    // lock destination bitmap data
    BitmapData dstData = dstImg.LockBits(
        new Rectangle(0, 0, width, height),
        ImageLockMode.ReadWrite, dstImg.PixelFormat);

    int pixelSize = (dstImg.PixelFormat == PixelFormat.Format8bppIndexed) ? 1 : 3;
    // NOTE(review): 'offset' is computed from dstData.Stride but applied to both
    // src and dst pointers — this assumes both bitmaps have identical strides;
    // confirm for images whose width*pixelSize is not 4-byte aligned.
    int offset = dstData.Stride - width * pixelSize;

    // do the job
    unsafe
    {
        byte *src = (byte *)srcData.Scan0.ToPointer( );
        byte *dst = (byte *)dstData.Scan0.ToPointer( );

        if (preserveLevel != 0.0)
        {
            // NOTE(review): this formula (preserveLevel*src + filterLevel*dst*t)
            // differs from the UnmanagedImage implementations of this filter —
            // presumably an older blending scheme; confirm intended semantics.
            // for each line
            for (int y = 0; y < height; y++)
            {
                // for each pixel
                for (int x = 0; x < width; x++)
                {
                    double t = texture[y, x];

                    for (int i = 0; i < pixelSize; i++, src++, dst++)
                    {
                        *dst = (byte)Math.Min(255.0f, (preserveLevel * *src) + (filterLevel * *dst) * t);
                    }
                }
                src += offset;
                dst += offset;
            }
        }
        else
        {
            // dst = t * dst + (1-t) * src, clamped to 255
            // for each line
            for (int y = 0; y < height; y++)
            {
                // for each pixel
                for (int x = 0; x < width; x++)
                {
                    double t1 = texture[y, x];
                    double t2 = 1 - t1;

                    for (int i = 0; i < pixelSize; i++, src++, dst++)
                    {
                        *dst = (byte)Math.Min(255.0f, t1 * *dst + t2 * *src);
                    }
                }
                src += offset;
                dst += offset;
            }
        }
    }

    // unlock all images
    dstImg.UnlockBits(dstData);
    srcImg.UnlockBits(srcData);

    // dispose source ? (only the filter2 temp, never the caller's bitmap)
    if (disposeSrc)
    {
        srcImg.Dispose( );
    }

    // return result
    return(dstImg);
}
// Apply filter using mask
// filter1 is applied to all black regions of the mask
// filter2 is applied to all white regions of the mask
//
// Returns a new bitmap: per pixel, non-zero mask bytes select the filter2 (or source)
// pixel, zero mask bytes keep the filter1 pixel.
// Throws ArgumentException when the source size differs from the mask,
// ApplicationException when a sub-filter changes the image dimensions.
public Bitmap Apply(Bitmap srcImg)
{
    // The mask dictates the working dimensions.
    int width = mask.Width;
    int height = mask.Height;

    // check source size
    if ((width != srcImg.Width) || (height != srcImg.Height))
    {
        // sorry, but source image must have the same dimension as mask image
        throw new ArgumentException();
    }

    // dstImg holds filter1's result; it is also the returned image.
    Bitmap dstImg = filter1.Apply(srcImg);
    bool disposeSrc = false;

    // check destination size
    if ((width != dstImg.Width) || (height != dstImg.Height))
    {
        dstImg.Dispose();
        // we are not handling such situations yet
        throw new ApplicationException();
    }

    // apply filter2 also, if it is
    // (srcImg is rebound to filter2's output; the caller's bitmap is untouched)
    if (filter2 != null)
    {
        srcImg = filter2.Apply(srcImg);
        disposeSrc = true;

        // check source size
        if ((width != srcImg.Width) || (height != srcImg.Height))
        {
            srcImg.Dispose();
            dstImg.Dispose();
            // we are not handling such situations yet
            throw new ApplicationException();
        }
    }

    // check pixel formats — promote whichever image is 8bpp to RGB so both match
    if (dstImg.PixelFormat != srcImg.PixelFormat)
    {
        IFilter f = new GrayscaleToRGB();

        // convert temp image to RGB format
        if (dstImg.PixelFormat == PixelFormat.Format8bppIndexed)
        {
            Bitmap t = f.Apply(dstImg);
            dstImg.Dispose();
            dstImg = t;
        }
        // convert source image to RGB format
        if (srcImg.PixelFormat == PixelFormat.Format8bppIndexed)
        {
            Bitmap t = f.Apply(srcImg);
            if (disposeSrc)
            {
                srcImg.Dispose();
            }
            srcImg = t;
            disposeSrc = true;
        }
    }

    // lock source bitmap data
    BitmapData srcData = srcImg.LockBits(
        new Rectangle(0, 0, width, height),
        ImageLockMode.ReadOnly, srcImg.PixelFormat);
    // lock destination bitmap data
    BitmapData dstData = dstImg.LockBits(
        new Rectangle(0, 0, width, height),
        ImageLockMode.ReadWrite, dstImg.PixelFormat);
    // lock mask bitmap data
    BitmapData maskData = mask.LockBits(
        new Rectangle(0, 0, width, height),
        ImageLockMode.ReadOnly, mask.PixelFormat);

    int pixelSize = (dstImg.PixelFormat == PixelFormat.Format8bppIndexed) ? 1 : 3;
    // NOTE(review): 'offset' comes from dstData.Stride but advances both src and dst —
    // assumes identical strides for the two images; confirm for odd widths.
    int offset = dstData.Stride - width * pixelSize;
    // mask step: 1 byte/pixel for 8bpp masks, 3 for RGB masks
    int maskInc = (mask.PixelFormat == PixelFormat.Format8bppIndexed) ? 1 : 3;
    int maskOffset = maskData.Stride - width * maskInc;

    // do the job
    unsafe
    {
        byte *src = (byte *)srcData.Scan0.ToPointer();
        byte *dst = (byte *)dstData.Scan0.ToPointer();
        byte *m = (byte *)maskData.Scan0.ToPointer();

        // for each line
        for (int y = 0; y < height; y++)
        {
            // for each pixel
            for (int x = 0; x < width; x++, m += maskInc)
            {
                // non-zero mask byte: take the src (filter2/source) pixel
                if (*m != 0)
                {
                    for (int i = 0; i < pixelSize; i++, src++, dst++)
                    {
                        *dst = *src;
                    }
                }
                else
                {
                    // zero mask byte: keep the filter1 pixel already in dst
                    src += pixelSize;
                    dst += pixelSize;
                }
            }
            src += offset;
            dst += offset;
            m += maskOffset;
        }
    }

    // unlock all images
    dstImg.UnlockBits(dstData);
    srcImg.UnlockBits(srcData);
    mask.UnlockBits(maskData);

    // dispose source ? (only the filter2 temp, never the caller's bitmap)
    if (disposeSrc)
    {
        srcImg.Dispose();
    }

    // return result
    return(dstImg);
}
// Builds a blue-on-transparent edge snapshot of the given image and stores it
// in SnapshotImage / SnapshotOriginalImage.
private void TakeSnapshot_funct(Bitmap img)
{
    // Grayscale -> Sobel edges -> back to RGB so colour filtering can run.
    Bitmap image = Grayscale.CommonAlgorithms.RMY.Apply(img);
    new SobelEdgeDetector().ApplyInPlace(image);
    image = new GrayscaleToRGB().Apply(image);

    // Suppress dark/gray pixels: keep only colours within radius 200 of (20,20,20)
    // filled inside — everything else stays, weak edges near black are removed.
    EuclideanColorFiltering grayKiller = new EuclideanColorFiltering();
    grayKiller.CenterColor.Red = 20;
    grayKiller.CenterColor.Green = 20;
    grayKiller.CenterColor.Blue = 20;
    grayKiller.FillOutside = false;
    grayKiller.Radius = 200;
    grayKiller.ApplyInPlace(image);

    // Recolour every surviving edge pixel (non-zero red channel) blue.
    for (int row = 0; row < image.Height; row++)
    {
        for (int col = 0; col < image.Width; col++)
        {
            Color pixel = image.GetPixel(col, row);
            if (pixel.R != 0)
            {
                image.SetPixel(col, row, Color.Blue);
            }
        }
    }

    // Black background becomes transparent.
    image.MakeTransparent(Color.Black);

    // NOTE(review): both properties reference the same Bitmap instance —
    // confirm SnapshotOriginalImage was not meant to hold a clone.
    SnapshotImage = image;
    SnapshotOriginalImage = image;
}
// On new video frame: normalize the frame to RGB, run glyph recognition,
// and drive the robot when one is connected.
private void videoSourcePlayer_NewFrame(object sender, ref Bitmap image)
{
    // Glyph processing expects an RGB frame; convert grayscale frames first.
    if (image.PixelFormat == PixelFormat.Format8bppIndexed)
    {
        var toRgb = new GrayscaleToRGB();
        Bitmap rgbFrame = toRgb.Apply(image);
        image.Dispose();
        image = rgbFrame;
    }

    lock (_sync)
    {
        var glyphs = _imageProcessor.ProcessImage(image);

        if (AppGlobals.Robot != null)
        {
            RunRobot(glyphs);
        }
    }
}
// =========================================================
// Binarizes the frame with the given threshold (par_d, truncated to int),
// keeping the result in RGB. Failures are swallowed and leave the frame as-is.
private void ThresholdFunct(ref Bitmap frame, double par_d)
{
    try
    {
        frame = Grayscale.CommonAlgorithms.RMY.Apply(frame);
        new Threshold((int)par_d).ApplyInPlace(frame);
        frame = new GrayscaleToRGB().Apply(frame);
    }
    catch
    {
        // Best-effort by design: a failed threshold leaves the frame untouched.
    }
}
// ========================================================= GrayscaleFunc
// Converts the frame to grayscale using BT709 luma weights, then back to
// 24bpp RGB so downstream drawing keeps working.
private void GrayscaleFunc(ref Bitmap frame)
{
    Grayscale toGrFilter = new Grayscale(0.2125, 0.7154, 0.0721); // create grayscale filter (BT709)
    Bitmap fr = toGrFilter.Apply(frame);
    try
    {
        GrayscaleToRGB toColFilter = new GrayscaleToRGB();
        frame = toColFilter.Apply(fr);
    }
    finally
    {
        // Fix: the intermediate grayscale bitmap was previously leaked.
        fr.Dispose();
    }
}
// =========================================================
// Runs an edge detector on a grayscale copy of the frame, selected by par_int
// (1 = Sobel, 2 = difference, 3 = homogeneity, 4 = Canny, default = homogeneity),
// then converts the result back to RGB.
private void Edge_detectFunc(ref Bitmap frame, int par_int)
{
    // Edge detectors work on gray images.
    frame = Grayscale.CommonAlgorithms.RMY.Apply(frame);

    if (par_int == 1)
    {
        new SobelEdgeDetector().ApplyInPlace(frame);
    }
    else if (par_int == 2)
    {
        new DifferenceEdgeDetector().ApplyInPlace(frame);
    }
    else if (par_int == 4)
    {
        new CannyEdgeDetector().ApplyInPlace(frame);
    }
    else
    {
        // Selector 3 and any unrecognized value both use the homogeneity detector.
        new HomogenityEdgeDetector().ApplyInPlace(frame);
    }

    // Restore colour format.
    frame = new GrayscaleToRGB().Apply(frame);
}
// On new video frame: recognize glyphs in the frame and, when an AR form is
// attached, hand it the frame plus the virtual models for every glyph that
// was recognized, carries visualization data, and has a pose estimate.
private void videoSourcePlayer_NewFrame( object sender, ref Bitmap image )
{
    if ( activeGlyphDatabase == null )
    {
        return;
    }

    if ( image.PixelFormat == PixelFormat.Format8bppIndexed )
    {
        // Glyph processing needs an RGB frame.
        GrayscaleToRGB toRgb = new GrayscaleToRGB( );
        Bitmap rgbFrame = toRgb.Apply( image );
        image.Dispose( );
        image = rgbFrame;
    }

    lock ( sync )
    {
        List<ExtractedGlyphData> glyphs = imageProcessor.ProcessImage( image );

        if ( arForm == null )
        {
            return;
        }

        List<VirtualModel> modelsToDisplay = new List<VirtualModel>( );

        foreach ( ExtractedGlyphData glyph in glyphs )
        {
            bool renderable =
                ( glyph.RecognizedGlyph != null ) &&
                ( glyph.RecognizedGlyph.UserData != null ) &&
                ( glyph.RecognizedGlyph.UserData is GlyphVisualizationData ) &&
                glyph.IsTransformationDetected;

            if ( renderable )
            {
                GlyphVisualizationData visualization =
                    (GlyphVisualizationData) glyph.RecognizedGlyph.UserData;

                modelsToDisplay.Add( new VirtualModel(
                    visualization.ModelName,
                    glyph.TransformationMatrix,
                    imageProcessor.GlyphSize ) );
            }
        }

        arForm.UpdateScene( image, modelsToDisplay );
    }
}
/// <summary>
/// Process the filter on the specified image.
/// Blends the result of <c>filter1</c> with the result of <c>filter2</c> (or the
/// source image when <c>filter2</c> is not set), weighted per pixel by the texture
/// intensity; <c>preserveLevel</c> optionally mixes back a share of the second image.
/// </summary>
///
/// <param name="sourceData">Source image data.</param>
/// <param name="destinationData">Destination image data.</param>
///
/// <exception cref="InvalidImagePropertiesException">Texture size does not match image size.</exception>
/// <exception cref="ApplicationException">Filters should not change image dimension.</exception>
///
protected override unsafe void ProcessFilter(UnmanagedImage sourceData, UnmanagedImage destinationData)
{
    // get source image dimension
    int width = sourceData.Width;
    int height = sourceData.Height;

    // if generator was specified, then generate a texture
    // otherwise use provided texture
    if (textureGenerator != null)
    {
        texture = textureGenerator.Generate(width, height);
    }
    else
    {
        // check existing texture
        if ((texture.GetLength(0) != height) || (texture.GetLength(1) != width))
        {
            // sorry, but source image must have the same dimension as texture
            throw new InvalidImagePropertiesException("Texture size does not match image size.");
        }
    }

    // apply first filter
    UnmanagedImage filteredImage1 = filter1.Apply(sourceData);

    // check size of the result image
    if ((width != filteredImage1.Width) || (height != filteredImage1.Height))
    {
        filteredImage1.Dispose( );
        throw new ApplicationException("Filters should not change image dimension.");
    }

    // convert 1st image to RGB if required (blend loop assumes 3 bytes/pixel)
    if (filteredImage1.PixelFormat == PixelFormat.Format8bppIndexed)
    {
        GrayscaleToRGB coloringFilter = new GrayscaleToRGB( );
        UnmanagedImage temp = coloringFilter.Apply(filteredImage1);
        filteredImage1.Dispose( );
        filteredImage1 = temp;
    }

    UnmanagedImage filteredImage2 = null;
    // apply second filter, if it was specified
    if (filter2 != null)
    {
        filteredImage2 = filter2.Apply(sourceData);
        // check size of the result image
        if ((width != filteredImage2.Width) || (height != filteredImage2.Height))
        {
            filteredImage1.Dispose( );
            filteredImage2.Dispose( );
            // we are not handling such situations yet
            throw new ApplicationException("Filters should not change image dimension.");
        }

        // convert 2nd image to RGB if required
        if (filteredImage2.PixelFormat == PixelFormat.Format8bppIndexed)
        {
            GrayscaleToRGB coloringFilter = new GrayscaleToRGB( );
            UnmanagedImage temp = coloringFilter.Apply(filteredImage2);
            filteredImage2.Dispose( );
            filteredImage2 = temp;
        }
    }

    // use source image as a second image, if second filter is not set
    if (filteredImage2 == null)
    {
        filteredImage2 = sourceData;
    }

    // do the job
    unsafe
    {
        byte *dst = (byte *)destinationData.ImageData.ToPointer( );
        byte *src1 = (byte *)filteredImage1.ImageData.ToPointer( );
        byte *src2 = (byte *)filteredImage2.ImageData.ToPointer( );

        // per-row padding (stride minus payload) for each of the three images
        int dstOffset = destinationData.Stride - 3 * width;
        int src1Offset = filteredImage1.Stride - 3 * width;
        int src2Offset = filteredImage2.Stride - 3 * width;

        if (preserveLevel != 0.0)
        {
            // dst = filterLevel * (t*src1 + (1-t)*src2) + preserveLevel * src2
            // for each line
            for (int y = 0; y < height; y++)
            {
                // for each pixel
                for (int x = 0; x < width; x++)
                {
                    double t1 = texture[y, x];
                    double t2 = 1 - t1;

                    for (int i = 0; i < 3; i++, src1++, src2++, dst++)
                    {
                        *dst = (byte)Math.Min(255.0f, filterLevel * (t1 * (*src1) + t2 * (*src2)) + preserveLevel * (*src2));
                    }
                }
                src1 += src1Offset;
                src2 += src2Offset;
                dst += dstOffset;
            }
        }
        else
        {
            // plain cross-fade: dst = t*src1 + (1-t)*src2
            // for each line
            for (int y = 0; y < height; y++)
            {
                // for each pixel
                for (int x = 0; x < width; x++)
                {
                    double t1 = texture[y, x];
                    double t2 = 1 - t1;

                    for (int i = 0; i < 3; i++, src1++, src2++, dst++)
                    {
                        *dst = (byte)Math.Min(255.0f, t1 * *src1 + t2 * *src2);
                    }
                }
                src1 += src1Offset;
                src2 += src2Offset;
                dst += dstOffset;
            }
        }
    }

    // dispose temp images; the source image itself is never disposed here
    filteredImage1.Dispose( );
    if (filteredImage2 != sourceData)
    {
        filteredImage2.Dispose( );
    }
}
// On new video frame: recognize glyphs and dispatch on the first one found —
// "D" and "T" set the controlvar state flag; "U" maps the glyph's vertical
// position to the system master volume.
private void videoSourcePlayer_NewFrame(object sender, ref Bitmap image)
{
    if (activeGlyphDatabase != null)
    {
        if (image.PixelFormat == PixelFormat.Format8bppIndexed)
        {
            // convert image to RGB if it is grayscale
            GrayscaleToRGB filter = new GrayscaleToRGB();
            Bitmap temp = filter.Apply(image);
            image.Dispose();
            image = temp;
        }

        // ### Glyph Work Distribution
        lock (sync)
        {
            List<ExtractedGlyphData> glyphs = imageProcessor.ProcessImage(image);
            try
            {
                // Only the first detected glyph is considered; if none were found
                // (or it was not recognized) the indexing/dereference below throws
                // and the empty catch ignores the frame.
                switch (glyphs[0].RecognizedGlyph.Name)
                {
                    case "D": // Cross hair glyph
                        {
                            controlvar = 1;
                            break;
                        }
                    case "U": // U shaped glyph
                        {
                            // Volume = 1 at the top of the frame, 0 at the bottom.
                            // NOTE(review): 480.0f presumably hard-codes the frame
                            // height — confirm against the camera resolution.
                            AForge.IntPoint[] cord = glyphs[0].RecognizedQuadrilateral.ToArray();
                            auddevice.AudioEndpointVolume.MasterVolumeLevelScalar = 1 - ((float)cord[0].Y / 480.0f);
                            break;
                        }
                    case "T": // T shaped glyph
                        {
                            controlvar = 2;
                            break;
                        }
                }
            }
            catch { } // best-effort: frames without a usable glyph are skipped

            // One-shot debug popup showing how many glyphs were detected.
            if (showcount == true)
            {
                showcount = false;
                MessageBox.Show(glyphs.Count.ToString());
            }
        }
    }
}
//All Processing Happens Here
//========================================================================================================================
//888888888888888888888888888888888888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
//========================================================================================================================
// On new video frame: recognize glyphs and map the first recognized glyph's
// name to a controlvar state code (1..6).
private void videoSourcePlayer_NewFrame(object sender, ref Bitmap image)
{
    // ###
    if (activeGlyphDatabase == null)
    {
        return;
    }

    if (image.PixelFormat == PixelFormat.Format8bppIndexed)
    {
        // Glyph processing needs an RGB frame.
        GrayscaleToRGB toRgb = new GrayscaleToRGB();
        Bitmap rgbFrame = toRgb.Apply(image);
        image.Dispose();
        image = rgbFrame;
    }

    lock (sync)
    {
        List<ExtractedGlyphData> glyphs = imageProcessor.ProcessImage(image);

        try
        {
            // Only the first glyph is considered; an empty list or an
            // unrecognized glyph throws here and is ignored below.
            string glyphName = glyphs[0].RecognizedGlyph.Name;

            if (glyphName == "Coca Cola")
            {
                controlvar = 1;
            }
            else if (glyphName == "Breakout Room")
            {
                controlvar = 2;
            }
            else if (glyphName == "Bus 6") // Bus 6 - Lindbergh
            {
                controlvar = 3;
            }
            else if (glyphName == "Building")
            {
                controlvar = 4;
            }
            else if (glyphName == "Hospital Room")
            {
                controlvar = 5;
            }
            else if (glyphName == "Medicine")
            {
                controlvar = 6;
            }
        }
        catch
        {
            // Best-effort: frames without a usable glyph are skipped.
        }

        // One-shot debug popup with the detected glyph count.
        if (showcount == true)
        {
            showcount = false;
            MessageBox.Show(glyphs.Count.ToString());
        }
    }
}
// Apply filter using mask
// filter1 is applied to all black regions of the mask
// filter2 is applied to all white regions of the mask
//
// Returns a new bitmap: per pixel, non-zero mask bytes take the filter2 (or source)
// pixel, zero mask bytes keep the filter1 pixel.
// Throws ArgumentException when the source size differs from the mask,
// ApplicationException when a sub-filter changes the image dimensions.
public Bitmap Apply(Bitmap srcImg)
{
    // The mask dictates the working dimensions.
    int width = mask.Width;
    int height = mask.Height;

    // check source size
    if ((width != srcImg.Width) || (height != srcImg.Height))
    {
        // sorry, but source image must have the same dimension as mask image
        throw new ArgumentException();
    }

    // dstImg holds filter1's result; it is also the returned image.
    Bitmap dstImg = filter1.Apply(srcImg);
    bool disposeSrc = false;

    // check destination size
    if ((width != dstImg.Width) || (height != dstImg.Height))
    {
        dstImg.Dispose();
        // we are not handling such situations yet
        throw new ApplicationException();
    }

    // apply filter2 also, if it is
    // (srcImg is rebound to filter2's output; the caller's bitmap is untouched)
    if (filter2 != null)
    {
        srcImg = filter2.Apply(srcImg);
        disposeSrc = true;

        // check source size
        if ((width != srcImg.Width) || (height != srcImg.Height))
        {
            srcImg.Dispose();
            dstImg.Dispose();
            // we are not handling such situations yet
            throw new ApplicationException();
        }
    }

    // check pixel formats — promote whichever image is 8bpp to RGB so both match
    if (dstImg.PixelFormat != srcImg.PixelFormat)
    {
        IFilter f = new GrayscaleToRGB();

        // convert temp image to RGB format
        if (dstImg.PixelFormat == PixelFormat.Format8bppIndexed)
        {
            Bitmap t = f.Apply(dstImg);
            dstImg.Dispose();
            dstImg = t;
        }
        // convert source image to RGB format
        if (srcImg.PixelFormat == PixelFormat.Format8bppIndexed)
        {
            Bitmap t = f.Apply(srcImg);
            if (disposeSrc)
                srcImg.Dispose();
            srcImg = t;
            disposeSrc = true;
        }
    }

    // lock source bitmap data
    BitmapData srcData = srcImg.LockBits(
        new Rectangle(0, 0, width, height),
        ImageLockMode.ReadOnly, srcImg.PixelFormat);
    // lock destination bitmap data
    BitmapData dstData = dstImg.LockBits(
        new Rectangle(0, 0, width, height),
        ImageLockMode.ReadWrite, dstImg.PixelFormat);
    // lock mask bitmap data
    BitmapData maskData = mask.LockBits(
        new Rectangle(0, 0, width, height),
        ImageLockMode.ReadOnly, mask.PixelFormat);

    int pixelSize = (dstImg.PixelFormat == PixelFormat.Format8bppIndexed) ? 1 : 3;
    // NOTE(review): 'offset' comes from dstData.Stride but advances both src and dst —
    // assumes identical strides for the two images; confirm for odd widths.
    int offset = dstData.Stride - width * pixelSize;
    // mask step: 1 byte/pixel for 8bpp masks, 3 for RGB masks
    int maskInc = (mask.PixelFormat == PixelFormat.Format8bppIndexed) ? 1 : 3;
    int maskOffset = maskData.Stride - width * maskInc;

    // do the job
    unsafe
    {
        byte * src = (byte *) srcData.Scan0.ToPointer();
        byte * dst = (byte *) dstData.Scan0.ToPointer();
        byte * m = (byte *) maskData.Scan0.ToPointer();

        // for each line
        for (int y = 0; y < height; y++)
        {
            // for each pixel
            for (int x = 0; x < width; x++, m += maskInc)
            {
                // non-zero mask byte: copy the src (filter2/source) pixel over
                if (*m != 0)
                {
                    for (int i = 0; i < pixelSize; i++, src++, dst++ )
                    {
                        *dst = *src;
                    }
                }
                else
                {
                    // zero mask byte: keep the filter1 pixel already in dst
                    src += pixelSize;
                    dst += pixelSize;
                }
            }
            src += offset;
            dst += offset;
            m += maskOffset;
        }
    }

    // unlock all images
    dstImg.UnlockBits(dstData);
    srcImg.UnlockBits(srcData);
    mask.UnlockBits(maskData);

    // dispose source ? (only the filter2 temp, never the caller's bitmap)
    if (disposeSrc)
        srcImg.Dispose();

    // return result
    return dstImg;
}
// On new video frame: recognize glyphs and raise the frameProcessed event
// with the results.
private void videoSourcePlayer_NewFrame( object sender, ref Bitmap image )
{
    if ( activeGlyphDatabase == null )
    {
        return;
    }

    if ( image.PixelFormat == PixelFormat.Format8bppIndexed )
    {
        // Glyph processing needs an RGB frame.
        GrayscaleToRGB toRgb = new GrayscaleToRGB( );
        Bitmap rgbFrame = toRgb.Apply( image );
        image.Dispose( );
        image = rgbFrame;
    }

    lock ( sync )
    {
        List<ExtractedGlyphData> glyphs = imageProcessor.ProcessImage( image );

        // Snapshot the delegate to stay safe against concurrent unsubscription.
        EventHandler<FrameData> handler = frameProcessed;
        if ( handler != null )
        {
            handler( this, new FrameData( glyphs, image ) );
        }
    }
}
// On new camera frame: clone the frame into capturedImage (as RGB), run glyph
// recognition over it, publish the results to recognisedGlyphs, and update the
// debug display. All state changes happen under syncLock.
private void video_NewFrame(object sender, NewFrameEventArgs eventArgs)
{
    lock(this.syncLock)
    {
        // Dispose last frame.
        if (this.capturedImage != null)
        {
            // NOTE(review): disposal is commented out, so the previous bitmap leaks
            // on every frame — presumably disabled because another consumer still
            // held a reference; confirm and either re-enable or clone for consumers.
            //this.capturedImage.Dispose();
        }

        // Clone the content. (The event's Frame is owned by the video source and
        // must not be kept, hence the clone.)
        this.capturedImage = (Bitmap)eventArgs.Frame.Clone();

        // Exit early if there was a problem cloning the frame data.
        if (this.capturedImage == null)
        {
            return;
        }

        // Convert image to RGB if it is grayscale.
        if (this.capturedImage.PixelFormat == PixelFormat.Format8bppIndexed)
        {
            GrayscaleToRGB filter = new GrayscaleToRGB();
            Bitmap temp = filter.Apply(this.capturedImage);
            this.capturedImage.Dispose();
            this.capturedImage = temp;
        }

        //TODO: Make preprocessing hear if it is needed.

        // Create tmp buffer.
        List<ExtractedGlyphData> tmpGlyps = recognizer.FindGlyphs(this.capturedImage);

        // Rewrite the glyph buffer.
        this.recognisedGlyphs = tmpGlyps;

        // Display image data.
        this.DisplayGlyphData(this.recognisedGlyphs, "Test1");
        // this.DisplayGlyphs(this.capturedImage, this.recognisedGlyphs);
    }
}
// ========================================================= GrayscaleFunc
/// <summary>
/// Converts the frame to grayscale (BT709 coefficients) and back to a 24bpp
/// RGB bitmap, so downstream filters keep receiving color images.
/// The extra parameters are unused; the signature matches the other filter
/// functions (e.g. ThresholdFunct) so they can be invoked uniformly.
/// </summary>
private void GrayscaleFunc(ref Bitmap frame, int par_int, double par_d, int par_R, int par_G, int par_B)
{
    Grayscale toGrFilter = new Grayscale(0.2125, 0.7154, 0.0721); // create grayscale filter (BT709)

    // Dispose the intermediate grayscale bitmap — the original code leaked it.
    using (Bitmap fr = toGrFilter.Apply(frame))
    {
        GrayscaleToRGB toColFilter = new GrayscaleToRGB();
        // NOTE(review): the incoming frame is overwritten without disposal,
        // matching the original ownership semantics — caller presumably
        // manages it; confirm before disposing here.
        frame = toColFilter.Apply(fr);
    }
}
/// <summary>
/// Hand-detection demo: thresholds "hand3.jpg", extracts the biggest blob,
/// finds its contour / convex hull / convexity defects, and draws the palm
/// center, hull, contour and wrist points over the blob image.
/// </summary>
static void Main(string[] args)
{
    Threshold thresh = new Threshold(10);
    Median median = new Median(9);
    Erosion3x3 erode = new Erosion3x3();
    Dilatation3x3 dilate = new Dilatation3x3();
    GrahamConvexHull hullFinder = new GrahamConvexHull();
    BorderFollowing contourFinder = new BorderFollowing();
    GrayscaleToRGB rgb = new GrayscaleToRGB();
    ConvexHullDefects defectFinder = new ConvexHullDefects(10);

    Bitmap img = (Bitmap)Bitmap.FromFile("hand3.jpg");
    Bitmap image = Grayscale.CommonAlgorithms.BT709.Apply(img);
    thresh.ApplyInPlace(image);
    //median.ApplyInPlace(image);
    erode.ApplyInPlace(image);
    dilate.ApplyInPlace(image);

    BlobCounter counter = new BlobCounter(image);
    counter.ObjectsOrder = ObjectsOrder.Area;   // biggest blob first
    Blob[] blobs = counter.GetObjectsInformation();

    if (blobs.Length > 0)
    {
        counter.ExtractBlobsImage(image, blobs[0], true);
        UnmanagedImage hand = blobs[0].Image;
        var contour = contourFinder.FindContour(hand);
        if (contour.Count() > 0)
        {
            var initialHull = hullFinder.FindHull(contour);
            var defects = defectFinder.FindDefects(contour, initialHull);
            var filteredHull = initialHull.ClusterHullPoints().FilterLinearHullPoints();
            var palmCenter = defects.Centroid(contour);
            var wristPoints = filteredHull.SelectWristPoints(defects, contour);

            Bitmap color = rgb.Apply(hand).ToManagedImage();
            //BitmapData data = color.LockBits(new Rectangle(0, 0, color.Width, color.Height), ImageLockMode.ReadWrite, color.PixelFormat);
            //Drawing.Polyline(data, contour, Color.Blue);
            //Drawing.Polygon(data, filteredHull, Color.Red);
            //color.UnlockBits(data);

            // Dispose Graphics and Pens deterministically — the original
            // leaked one GDI+ handle per drawing call.
            using (Graphics gr = Graphics.FromImage(color))
            using (Pen red3 = new Pen(Brushes.Red, 3))
            using (Pen blue3 = new Pen(Brushes.Blue, 3))
            using (Pen green6 = new Pen(Brushes.Green, 6))
            using (Pen yellow6 = new Pen(Brushes.Yellow, 6))
            using (Pen powderBlue6 = new Pen(Brushes.PowderBlue, 6))
            {
                gr.DrawPolygon(red3, filteredHull.ToPtArray());
                gr.DrawLines(blue3, contour.ToPtArray());
                gr.DrawEllipse(red3, palmCenter.X - 10, palmCenter.Y - 10, 20, 20);

                foreach (ConvexityDefect defect in defects)
                {
                    gr.DrawEllipse(green6, contour[defect.Point].X - 10,
                                   contour[defect.Point].Y - 10, 20, 20);
                }
                foreach (AForge.IntPoint pt in filteredHull)
                {
                    gr.DrawEllipse(yellow6, pt.X - 10, pt.Y - 10, 20, 20);
                }
                foreach (AForge.IntPoint pt in wristPoints)
                {
                    gr.DrawEllipse(powderBlue6, pt.X - 10, pt.Y - 10, 20, 20);
                }
            }

            ImageBox.Show(color);
        }
    }
}
// =========================================================
/// <summary>
/// Binarizes the frame: grayscale conversion (RMY coefficients), fixed
/// threshold at <paramref name="par_int"/>, then back to a 24bpp RGB bitmap.
/// The remaining parameters are unused; the signature matches the other
/// filter functions (e.g. GrayscaleFunc) so they can be invoked uniformly.
/// </summary>
private void ThresholdFunct(ref Bitmap frame, int par_int, double par_d, int par_R, int par_G, int par_B)
{
    // Dispose the intermediate grayscale bitmap — the original code leaked
    // it when reassigning frame to the RGB result.
    using (Bitmap gray = Grayscale.CommonAlgorithms.RMY.Apply(frame))
    {
        Threshold filter = new Threshold(par_int);
        filter.ApplyInPlace(gray);
        GrayscaleToRGB toColFilter = new GrayscaleToRGB();
        // NOTE(review): the incoming frame is overwritten without disposal,
        // matching the original ownership semantics — caller presumably
        // manages it; confirm before disposing here.
        frame = toColFilter.Apply(gray);
    }
}
// Apply filter using texture
// The higher intensity in texture - the more filter1 is used
//
// Blends the result of filter1 (and optionally filter2 / the source image)
// per pixel, weighted by the texture value at that pixel.
// Throws ArgumentException when an existing texture does not match the image
// size; throws ApplicationException when a sub-filter changes dimensions.
// Returns a new Bitmap; srcImg is disposed only when this method replaced it.
public Bitmap Apply( Bitmap srcImg )
{
    int width = srcImg.Width;
    int height = srcImg.Height;

    if ( textureGenerator != null )
    {
        // create new texture, if generator was provided
        texture = textureGenerator.Generate( width, height );
    }
    else
    {
        // check existing texture
        if ( ( texture.GetLength( 0 ) != height ) || ( texture.GetLength( 1 ) != width ) )
        {
            // sorry, but source image must have the same dimension as texture
            throw new ArgumentException( "Texture size does not match image size" );
        }
    }

    // destination image starts as filter1's output
    Bitmap dstImg = filter1.Apply( srcImg );
    // tracks whether srcImg was replaced here and must be disposed by us
    bool disposeSrc = false;

    // check destination size
    if ( (width != dstImg.Width ) || ( height != dstImg.Height ) )
    {
        dstImg.Dispose( );
        // we are not handling such situations yet
        throw new ApplicationException( );
    }

    // apply filter2 also, if it is
    if ( filter2 != null )
    {
        srcImg = filter2.Apply( srcImg );
        disposeSrc = true;

        // check source size
        if ( ( width != srcImg.Width ) || ( height != srcImg.Height) )
        {
            srcImg.Dispose( );
            dstImg.Dispose( );
            // we are not handling such situations yet
            throw new ApplicationException( );
        }
    }

    // check pixel formats — both images must match before the pixel loop
    if ( dstImg.PixelFormat != srcImg.PixelFormat )
    {
        IFilter f = new GrayscaleToRGB( );

        // convert temp image to RGB format
        if ( dstImg.PixelFormat == PixelFormat.Format8bppIndexed )
        {
            Bitmap t = f.Apply( dstImg );
            dstImg.Dispose( );
            dstImg = t;
        }
        // convert source image to RGB format
        if ( srcImg.PixelFormat == PixelFormat.Format8bppIndexed )
        {
            Bitmap t = f.Apply( srcImg );
            if ( disposeSrc )
                srcImg.Dispose( );
            srcImg = t;
            disposeSrc = true;
        }
    }

    // lock source bitmap data
    BitmapData srcData = srcImg.LockBits(
        new Rectangle( 0, 0, width, height ),
        ImageLockMode.ReadOnly, srcImg.PixelFormat );
    // lock destination bitmap data
    BitmapData dstData = dstImg.LockBits(
        new Rectangle( 0, 0, width, height ),
        ImageLockMode.ReadWrite, dstImg.PixelFormat );

    // bytes per pixel: 1 for 8bpp grayscale, 3 for 24bpp RGB
    int pixelSize = (dstImg.PixelFormat == PixelFormat.Format8bppIndexed) ? 1 : 3;
    // NOTE(review): the same row padding (from dstData.Stride) advances both
    // src and dst pointers — assumes both bitmaps have equal strides, which
    // holds when both share the dimensions and pixel format checked above.
    int offset = dstData.Stride - width * pixelSize;

    // do the job
    unsafe
    {
        byte * src = (byte *) srcData.Scan0.ToPointer( );
        byte * dst = (byte *) dstData.Scan0.ToPointer( );

        if ( preserveLevel != 0.0 )
        {
            // blend: preserveLevel * source + filterLevel * filtered * texture
            // for each line
            for ( int y = 0; y < height; y++ )
            {
                // for each pixel
                for ( int x = 0; x < width; x++ )
                {
                    double t = texture[y, x];
                    for ( int i = 0; i < pixelSize; i++, src++, dst++ )
                    {
                        *dst = (byte) Math.Min( 255.0f, ( preserveLevel * *src ) + ( filterLevel * *dst ) * t );
                    }
                }
                src += offset;
                dst += offset;
            }
        }
        else
        {
            // pure cross-fade: texture value weights filtered vs. source
            // for each line
            for ( int y = 0; y < height; y++ )
            {
                // for each pixel
                for ( int x = 0; x < width; x++ )
                {
                    double t1 = texture[y, x];
                    double t2 = 1 - t1;
                    for ( int i = 0; i < pixelSize; i++, src++, dst++ )
                    {
                        *dst = (byte) Math.Min( 255.0f, t1 * *dst + t2 * *src );
                    }
                }
                src += offset;
                dst += offset;
            }
        }
    }

    // unlock all images
    dstImg.UnlockBits( dstData );
    srcImg.UnlockBits( srcData );

    // dispose source ?
    if ( disposeSrc )
        srcImg.Dispose( );

    // return result
    return dstImg;
}