/// <summary>
/// Runs a morphological Closing over the color-reversed image: reverse the
/// colors, close in place, then reverse back before returning.
/// </summary>
/// <param name="bitmap">Source bitmap.</param>
/// <returns>The color-reversed result of closing the reversed input.</returns>
public Bitmap Closing(Bitmap bitmap)
{
    var reversed = ReverseBitmapColors(bitmap);
    var closingFilter = new Closing();
    closingFilter.ApplyInPlace(reversed);
    return ReverseBitmapColors(reversed);
}
/// <summary>
/// Loads an image and runs the full preprocessing pipeline:
/// contrast stretch, BT709 grayscale, opening, Sobel edges, closing,
/// fixed threshold (100), hole filling, opening, and a final erosion.
/// </summary>
/// <param name="imagePath">Path of the image file to load.</param>
/// <returns>The processed 8bpp image.</returns>
public static Bitmap ProcessImage(string imagePath)
{
    var img = AForge.Imaging.Image.FromFile(imagePath);

    ContrastStretch filterContrastStretch = new ContrastStretch();
    filterContrastStretch.ApplyInPlace(img);

    try
    {
        // Apply() allocates a new 8bpp image; dispose the original so the
        // 24/32bpp source bitmap is not leaked (original code dropped it).
        var gray = Grayscale.CommonAlgorithms.BT709.Apply(img);
        img.Dispose();
        img = gray;
    }
    catch
    {
        // Grayscale conversion throws when the source is already grayscale;
        // keep the original best-effort behavior of warning and continuing.
        System.Windows.Forms.MessageBox.Show("The image should not be grayscale");
    }

    Opening filterOpening = new Opening();
    filterOpening.ApplyInPlace(img);

    SobelEdgeDetector sobel = new SobelEdgeDetector();
    sobel.ApplyInPlace(img);

    Closing filterClosing = new Closing();
    filterClosing.ApplyInPlace(img);

    Threshold threshold = new Threshold(100);
    threshold.ApplyInPlace(img);

    // Fill holes of any size within the image bounds; size filtering for
    // width/height is decoupled.
    FillHoles fillHoles = new FillHoles();
    fillHoles.MaxHoleWidth = img.Width;
    fillHoles.MaxHoleHeight = img.Height;
    fillHoles.CoupledSizeFiltering = false;
    fillHoles.ApplyInPlace(img);

    filterOpening.ApplyInPlace(img);

    Erosion filterErosion = new Erosion();
    filterErosion.ApplyInPlace(img);

    return img;
}
// Menu handler: applies a morphological Closing filter to a copy of the
// loaded image (gambar) and displays the result in pictureBox2.
void ClosingToolStripMenuItemClick(object sender, EventArgs e)
{
    // No image loaded yet — nothing to do.
    if (gambar == null)
    {
        return;
    }

    // Clone so the original stays untouched, then filter the clone in place.
    gambar2 = (Bitmap)gambar.Clone();
    new Closing().ApplyInPlace(gambar2);

    // Show the filtered result.
    pictureBox2.Image = gambar2;
}
/// <summary>
/// Binarizes an image: median smoothing, BT709 grayscale conversion,
/// Otsu thresholding, then a morphological closing.
/// </summary>
/// <param name="image">Source bitmap. NOTE: the median filter is applied
/// in place, so the caller's bitmap is smoothed as a side effect.</param>
/// <returns>A new 8bpp binarized bitmap.</returns>
private Bitmap Binarize(Bitmap image)
{
    Median median = new Median();
    median.ApplyInPlace(image);

    // Grayscale.Apply produces a new 8bpp image; all later filters work on
    // that copy rather than on the caller's bitmap.
    var bmp8bpp = Grayscale.CommonAlgorithms.BT709.Apply(image);

    // Local renamed from "OtsuThreshold" so it no longer shadows the type.
    OtsuThreshold otsuThreshold = new OtsuThreshold();
    otsuThreshold.ApplyInPlace(bmp8bpp);

    // Closing fills small gaps/holes left after thresholding.
    Closing closing = new Closing();
    closing.ApplyInPlace(bmp8bpp);

    return bmp8bpp;
}
/// <summary>
/// Binarizes and dilates an image: a 3x3 convolution, BT709 grayscale,
/// invert/dilate/invert, median smoothing, closing, then Otsu thresholding.
/// </summary>
/// <param name="image">Source bitmap. NOTE: the convolution is applied in
/// place, so the caller's bitmap is modified as a side effect.</param>
/// <returns>A new 8bpp binarized bitmap.</returns>
private Bitmap BinarizeAndDilationWithMedian(Bitmap image)
{
    // Emboss-style kernel (sums to 1) — presumably chosen to sharpen edge
    // structure before binarization; TODO confirm intent.
    int[,] kernel =
    {
        { -2, -1, 0 },
        { -1,  1, 1 },
        {  0,  1, 2 }
    };

    // Locals renamed from "Convolution"/"OtsuThreshold" so they no longer
    // shadow their type names.
    Convolution convolution = new Convolution(kernel);
    convolution.ApplyInPlace(image);

    var bmp8bpp = Grayscale.CommonAlgorithms.BT709.Apply(image);

    // Invert -> dilate -> invert: dilates the dark regions of the original.
    Invert invert = new Invert();
    invert.ApplyInPlace(bmp8bpp);

    Dilatation dilatation = new Dilatation();
    dilatation.ApplyInPlace(bmp8bpp);

    invert.ApplyInPlace(bmp8bpp);

    Median median = new Median();
    median.ApplyInPlace(bmp8bpp);

    Closing closing = new Closing();
    closing.ApplyInPlace(bmp8bpp);

    OtsuThreshold otsuThreshold = new OtsuThreshold();
    otsuThreshold.ApplyInPlace(bmp8bpp);

    return bmp8bpp;
}
// Batch feature extraction over 26 alphabet folders: for every image,
// detects skin pixels, extracts the biggest blobs of the binary and color
// images, ANDs them, runs Canny edges, builds a 36-bin normalized feature
// vector, saves the segmented image, and appends all vectors to segmented.txt.
private void skinDetectToolStripMenuItem_Click(object sender, EventArgs e)
{
    List <List <double> > Listofvectors = new List <List <double> >();
    // NOTE(review): writer is not wrapped in a using — it leaks if an
    // exception occurs before file.Close() at the end of the method.
    System.IO.StreamWriter file = new System.IO.StreamWriter(@"C:\Users\gsrip\Documents\MyDocuments\Saarthi AI and IP\Segmented\segmented.txt", append: true);
    // Index 1..26 maps to letters A..Z (slot 0 is a placeholder).
    String alphabets = "0ABCDEFGHIJKLMNOPQRSTUVWXYZ";
    for (int index = 1; index <= 26; index++)
    {
        //for each folder select all filenames
        filenames = Directory.GetFiles(dirnames[index - 1]);
        int n = 0; // per-folder counter used to name saved segmented images
        foreach (string filename in filenames)
        {
            //load an image in a bitmap
            Bitmap bmplocal = new Bitmap(filename);
            int height = 300, width = 300;
            bmp = new Bitmap(bmplocal, width, height); // resize to 300x300
            pictureBox1.Image = new Bitmap(bmp);
            pictureBox1.SizeMode = PictureBoxSizeMode.StretchImage;
            using (bmp)
            using (skinBmp = new Bitmap(bmp.Width, bmp.Height))
            {
                //skin detection: copy only pixels matching the RGB skin rule
                for (int x = 0; x < bmp.Width; x++)
                {
                    for (int y = 0; y < bmp.Height; y++)
                    {
                        Color pixel = bmp.GetPixel(x, y);
                        int red = pixel.R;
                        int blue = pixel.B;
                        int green = pixel.G;
                        int max = Math.Max(red, Math.Max(green, blue));
                        int min = Math.Min(red, Math.Min(green, blue));
                        int rgdif = red - green;
                        int abs = Math.Abs(rgdif);
                        // RGB skin classifier: R>95, G>40, B>20,
                        // max-min>15, |R-G|>15, and R dominant.
                        if (red > 95 && green > 40 && blue > 20 && max - min > 15 && abs > 15 && red > green && red > blue)
                        {
                            skinBmp.SetPixel(x, y, pixel);
                        }
                    }
                }
                pictureBox2.Image = new Bitmap(skinBmp);
                //grayscale filter (BT709)
                Grayscale filter1 = new Grayscale(0.2125, 0.7154, 0.0721);
                Bitmap newImage = new Bitmap(bmp);
                Bitmap grayImage = filter1.Apply(newImage);
                // Binarize at a fixed threshold of 100.
                Threshold filter2 = new Threshold(100);
                Bitmap bwImage = filter2.Apply(grayImage);
                // Morphological cleanup: closing then opening, in place.
                Closing filter5 = new Closing();
                filter5.ApplyInPlace(bwImage);
                Opening filter3 = new Opening();
                filter3.ApplyInPlace(bwImage);
                // Biggest blob of the cleaned binary image...
                ExtractBiggestBlob filter4 = new ExtractBiggestBlob();
                Bitmap biggestBlobsImage = filter4.Apply(bwImage);
                // ...and of the color skin image currently in pictureBox2.
                ExtractBiggestBlob filter6 = new ExtractBiggestBlob();
                Bitmap biggestBlobsImage1 = filter6.Apply((Bitmap)pictureBox2.Image);
                Bitmap orgimage = new Bitmap(biggestBlobsImage1, 300, 300);
                Bitmap blobimage = new Bitmap(biggestBlobsImage, 300, 300);
                Bitmap newimage = new Bitmap(300, 300);
                //anding the two images: binary blob masks the color blob
                for (int x = 0; x < 300; x++)
                {
                    for (int y = 0; y < 300; y++)
                    {
                        Color pixel1 = orgimage.GetPixel(x, y);
                        Color pixel2 = blobimage.GetPixel(x, y);
                        int red1 = pixel1.R, red2 = pixel2.R;
                        int blue1 = pixel1.B, blue2 = pixel2.B;
                        int green1 = pixel1.G, green2 = pixel2.G;
                        int newred, newblue, newgreen;
                        newred = red1 & red2;
                        newblue = blue1 & blue2;
                        newgreen = green1 & green2;
                        Color newpixel = Color.FromArgb(newred, newgreen, newblue);
                        newimage.SetPixel(x, y, newpixel);
                    }
                }
                // Edge map of the masked image, shown in pictureBox3.
                CannyEdgeDetector filter7 = new CannyEdgeDetector();
                Grayscale filter = new Grayscale(0.2125, 0.7154, 0.0721);
                Bitmap edges = filter.Apply(newimage);
                filter7.ApplyInPlace(edges);
                pictureBox3.Image = new Bitmap(edges);
                // Save the segmented image under its letter's folder.
                String location = "C:\\Users\\gsrip\\Documents\\MyDocuments\\Saarthi AI and IP\\Segmented\\";
                location = location + alphabets[index].ToString() + "\\image";
                newimage.Save(@location + (n++).ToString() + ".jpg");
                // 6x6 grid of 50x50 blocks: count non-black edge pixels per block.
                List <int> featureVector = new List <int>();
                for (int i = 0; i < 6; i++)
                {
                    for (int j = 0; j < 6; j++)
                    {
                        int count = 0;
                        for (int x = i * 50; x < (i * 50) + 50; x++)
                        {
                            for (int y = j * 50; y < (j * 50) + 50; y++)
                            {
                                Color pixel = edges.GetPixel(x, y);
                                if (pixel.R != 0 && pixel.G != 0 && pixel.B != 0)
                                {
                                    count++;
                                }
                            }
                        }
                        featureVector.Add(count);
                    }
                }
                // Normalize each block count by the total over all blocks.
                int sumofvector = featureVector.Sum();
                List <double> featureVectorNorm = new List <double>();
                foreach (var d in featureVector)
                {
                    featureVectorNorm.Add((double)d / sumofvector);
                }
                Listofvectors.Add(featureVectorNorm);
            }//end of using
        } // end of foreach filename
        // NOTE(review): Listofvectors is never cleared between folders, so
        // every pass re-writes all earlier folders' vectors labelled with the
        // CURRENT index — likely unintended duplication; verify.
        foreach (var vector in Listofvectors)
        {
            String line = index.ToString() + ": ";
            //Console.WriteLine(value);
            foreach (var obj in vector)
            {
                line = line + obj.ToString() + " ";
                //Console.Write(value);
            }
            file.WriteLine(line);
            //Console.WriteLine();
        }
    } //end of foreach index
    file.Close();
} //end of skindetect tool strip
// Extracts a normalized edge-density feature vector from a raw bitmap:
// skin extraction, grayscale + threshold + closing/opening cleanup, biggest
// blobs of the binary and skin images ANDed together, Canny edges, then a
// blockSize x blockSize grid of non-black pixel counts normalized by total.
private List <double> automateFeatureNormalizationExtraction(Bitmap rawBitmapData)
{
    Bitmap afterSkinOnly = performSkinExtract(rawBitmapData);
    // NOTE(review): green coefficient 0.71254 differs from BT709's 0.7154
    // used elsewhere in this file — possible typo; confirm.
    Grayscale filter = new Grayscale(0.2125, 0.71254, 0.0721);
    Bitmap grayImage = filter.Apply(afterSkinOnly);
    Threshold filter2 = new Threshold(100);
    Bitmap filteredImage = filter2.Apply(grayImage);
    // Morphological cleanup: closing then opening, in place.
    Closing close = new Closing();
    close.ApplyInPlace(filteredImage);
    Opening open = new Opening();
    open.ApplyInPlace(filteredImage);
    // create filter for the filtered image
    ExtractBiggestBlob filter3 = new ExtractBiggestBlob();
    // apply the filter
    Bitmap biggestBlobsImage = filter3.Apply(filteredImage);
    AForge.IntPoint a = filter3.BlobPosition;
    //Console.WriteLine(a);
    //Biggest blob for old extracted skin image
    ExtractBiggestBlob filter4 = new ExtractBiggestBlob();
    Bitmap skinBlob = new Bitmap(afterSkinOnly);
    //apply filter
    Bitmap biggestSkinBlob = filter4.Apply(skinBlob);
    //Skin color for largest blob
    Bitmap one = new Bitmap(biggestSkinBlob);
    Bitmap two = new Bitmap(biggestBlobsImage);
    int i, j;
    // Per-pixel AND of the two blob images, written into "two".
    for (i = 0; i < two.Width; i++)
    {
        for (j = 0; j < two.Height; j++)
        {
            Color pixelOne = one.GetPixel(i, j);
            Color pixelTwo = two.GetPixel(i, j);
            int redOne = pixelOne.R;
            int greenOne = pixelOne.G;
            int blueOne = pixelOne.B;
            int redTwo = pixelTwo.R;
            int greenTwo = pixelTwo.G;
            int blueTwo = pixelTwo.B;
            // This mask is logically AND with original image to extract only the palm which is required for feature extraction.
            two.SetPixel(i, j, Color.FromArgb(redOne & redTwo, greenOne & greenTwo, blueOne & blueTwo));
        }
    }
    //Getting a grayscale image from the recolored image
    Bitmap getGrayImage = filter.Apply(two);
    // create filter — thresholds of 0 keep every detected edge
    CannyEdgeDetector filter1 = new CannyEdgeDetector();
    filter1.LowThreshold = 0;
    filter1.HighThreshold = 0;
    filter1.GaussianSigma = 1.4;
    // apply the filter
    Bitmap cannyEdgeImage = filter1.Apply(getGrayImage);
    // Resize the edge map to a fixed 360x360 for block extraction.
    Bitmap resizeImage = new Bitmap(360, 360);
    using (var graphics = Graphics.FromImage(resizeImage))
        graphics.DrawImage(cannyEdgeImage, 0, 0, 360, 360);
    pictureBox3.Image = new Bitmap(resizeImage);
    pictureBox3.SizeMode = PictureBoxSizeMode.StretchImage;
    int x, y;
    //Image to obtain blocks for
    Bitmap imageWithBlock = new Bitmap(resizeImage);
    //Console.WriteLine("Width = " + resizeImage.Width + " Height = " + resizeImage.Height);
    int imageHeightSize = resizeImage.Height / blockSize;
    int imageWidthSize = resizeImage.Width / blockSize;
    //Console.WriteLine("Width = " + imageWidthSize + " Height = " + imageHeightSize);
    // Count non-black ("edge") pixels per grid block.
    List <int> featureVector = new List <int>();
    double totalPixelCount = 0;
    for (i = 0; i < blockSize; i++)
    {
        for (j = 0; j < blockSize; j++)
        {
            int whiteEdgeCount = 0, blackEdgeCount = 0;
            for (x = i * imageWidthSize; x < (i * imageWidthSize) + imageWidthSize; x++)
            {
                for (y = j * imageHeightSize; y < (j * imageHeightSize) + imageHeightSize; y++)
                {
                    // To count the edges in the range
                    Color singlePixel = imageWithBlock.GetPixel(x, y);
                    int red = singlePixel.R;
                    int green = singlePixel.G;
                    int blue = singlePixel.B;
                    if (singlePixel != Color.FromArgb(Color.Black.ToArgb()))
                    {
                        whiteEdgeCount++;
                    }
                    else
                    {
                        blackEdgeCount++;
                    }
                }
            }
            //Console.WriteLine("White = " + whiteEdgeCount + " Black = " + blackEdgeCount);
            //Add value to total count
            totalPixelCount += whiteEdgeCount;
            // whiteCount = edges in range
            featureVector.Add(whiteEdgeCount);
        }
    }
    //Calculate Normalization and add the value to the featureNormVector
    List <double> featureNormVector = new List <double>();
    //Total Pixel Count
    //Console.WriteLine(totalPixelCount);
    //Normalization: each block count divided by the grand total.
    for (i = 0; i < featureVector.Count; i++)
    {
        double normalizedValue = featureVector[i] / totalPixelCount;
        Console.WriteLine(normalizedValue);
        featureNormVector.Add(normalizedValue);
    }
    // NOTE(review): "i" here is the post-loop counter (== featureVector.Count),
    // not a count of anything computed inside the loop.
    Console.WriteLine("Total count of norm(individual)=" + i);
    return(featureNormVector);
}
// Menu handler: runs the skin-detection / blob / Canny / block-count pipeline
// on the image in pictureBox1, showing intermediate results in pictureBox2
// and pictureBox3 and printing diagnostics to the console.
// NOTE(review): featureNormVector computed at the end is discarded — the
// handler returns void and never stores it; confirm whether that's intended.
private void skinColorToolStripMenuItem_Click(object sender, EventArgs e)
{
    //Extracting RGBs
    Bitmap hand = new Bitmap(pictureBox1.Image, newSize);
    Bitmap skinDetect = new Bitmap(hand.Width, hand.Height);
    //Bitmap blackWhite = new Bitmap(hand.Width, hand.Height);
    Color black = Color.Black; // NOTE(review): assigned but unused below
    //Color white = Color.White;
    int i, j;
    for (i = 0; i < hand.Width; i++)
    {
        for (j = 0; j < hand.Height; j++)
        {
            Color pixel = hand.GetPixel(i, j);
            int red = pixel.R;
            int green = pixel.G;
            int blue = pixel.B;
            /* (R, G, B) is classified as skin if:
             * R > 95 and G > 40 and B > 20 and
             * max {R, G, B} – min{R, G, B} > 15 and |R – G| > 15 and R > G and R > B */
            if ((red > 95 && green > 40 && blue > 20) && (max(red, green, blue) - min(red, green, blue) > 15) && Math.Abs(red - green) > 15 && red > green && red > blue)
            {
                //Console.WriteLine("Success");
                skinDetect.SetPixel(i, j, pixel);
            }
        }
    }
    pictureBox2.Image = new Bitmap(skinDetect);
    pictureBox2.SizeMode = PictureBoxSizeMode.StretchImage;
    // NOTE(review): green coefficient 0.71254 differs from BT709's 0.7154
    // used elsewhere in this file — possible typo; confirm.
    Grayscale filter = new Grayscale(0.2125, 0.71254, 0.0721);
    Bitmap grayImage = filter.Apply(skinDetect);
    Threshold filter2 = new Threshold(100);
    Bitmap filteredImage = filter2.Apply(grayImage);
    // Morphological cleanup: closing then opening, in place.
    Closing close = new Closing();
    close.ApplyInPlace(filteredImage);
    Opening open = new Opening();
    open.ApplyInPlace(filteredImage);
    // create filter for the filtered image
    ExtractBiggestBlob filter3 = new ExtractBiggestBlob();
    // apply the filter
    Bitmap biggestBlobsImage = filter3.Apply(filteredImage);
    AForge.IntPoint a = filter3.BlobPosition;
    Console.WriteLine(a);
    //Biggest blob for old extracted skin image
    ExtractBiggestBlob filter4 = new ExtractBiggestBlob();
    Bitmap skinBlob = new Bitmap(skinDetect);
    //apply filter
    Bitmap biggestSkinBlob = filter4.Apply(skinBlob);
    //Skin color for largest blob
    Bitmap one = new Bitmap(biggestSkinBlob);
    Bitmap two = new Bitmap(biggestBlobsImage);
    // Per-pixel AND of the two blob images, written into "two".
    for (i = 0; i < two.Width; i++)
    {
        for (j = 0; j < two.Height; j++)
        {
            Color pixelOne = one.GetPixel(i, j);
            Color pixelTwo = two.GetPixel(i, j);
            int redOne = pixelOne.R;
            int greenOne = pixelOne.G;
            int blueOne = pixelOne.B;
            int redTwo = pixelTwo.R;
            int greenTwo = pixelTwo.G;
            int blueTwo = pixelTwo.B;
            // This mask is logically AND with original image to extract only the palm which is required for feature extraction.
            two.SetPixel(i, j, Color.FromArgb(redOne & redTwo, greenOne & greenTwo, blueOne & blueTwo));
        }
    }
    //Getting a grayscale image from the recolored image
    Bitmap getGrayImage = filter.Apply(two);
    // create filter — thresholds of 0 keep every detected edge
    CannyEdgeDetector filter1 = new CannyEdgeDetector();
    filter1.LowThreshold = 0;
    filter1.HighThreshold = 0;
    filter1.GaussianSigma = 1.4;
    // apply the filter
    Bitmap cannyEdgeImage = filter1.Apply(getGrayImage);
    // Resize the edge map to a fixed 360x360 for block extraction.
    Bitmap resizeImage = new Bitmap(360, 360);
    using (var graphics = Graphics.FromImage(resizeImage))
        graphics.DrawImage(cannyEdgeImage, 0, 0, 360, 360);
    pictureBox3.Image = new Bitmap(resizeImage);
    pictureBox3.SizeMode = PictureBoxSizeMode.StretchImage;
    int x, y;
    //Image to obtain blocks for
    Bitmap imageWithBlock = new Bitmap(resizeImage);
    Console.WriteLine("Width = " + resizeImage.Width + " Height = " + resizeImage.Height);
    int imageHeightSize = resizeImage.Height / blockSize;
    int imageWidthSize = resizeImage.Width / blockSize;
    Console.WriteLine("Width = " + imageWidthSize + " Height = " + imageHeightSize);
    // Count non-black ("edge") pixels per grid block.
    List <int> featureVector = new List <int>();
    double totalPixelCount = 0;
    for (i = 0; i < blockSize; i++)
    {
        for (j = 0; j < blockSize; j++)
        {
            int whiteEdgeCount = 0, blackEdgeCount = 0;
            for (x = i * imageWidthSize; x < (i * imageWidthSize) + imageWidthSize; x++)
            {
                for (y = j * imageHeightSize; y < (j * imageHeightSize) + imageHeightSize; y++)
                {
                    // To count the edges in the range
                    Color singlePixel = imageWithBlock.GetPixel(x, y);
                    int red = singlePixel.R;
                    int green = singlePixel.G;
                    int blue = singlePixel.B;
                    if (singlePixel != Color.FromArgb(Color.Black.ToArgb()))
                    {
                        whiteEdgeCount++;
                    }
                    else
                    {
                        blackEdgeCount++;
                    }
                }
            }
            //Console.WriteLine("White = " + whiteEdgeCount + " Black = " + blackEdgeCount);
            //Add value to total count
            totalPixelCount += whiteEdgeCount;
            // whiteCount = edges in range
            featureVector.Add(whiteEdgeCount);
        }
    }
    //Calculate Normalization and add the value to the featureNormVector
    List <double> featureNormVector = new List <double>();
    //Total Pixel Count
    //Console.WriteLine(totalPixelCount);
    //Normalization: each block count divided by the grand total.
    for (i = 0; i < featureVector.Count; i++)
    {
        double normalizedValue = featureVector[i] / totalPixelCount;
        Console.WriteLine(normalizedValue);
        featureNormVector.Add(normalizedValue);
    }
}
// Processes one frame: converts it to grayscale bytes, trains the background
// model until it is operational, then extracts the foreground, cleans it with
// closing/opening, finds blobs within the configured size limits, draws red
// rectangles on the input bitmap, and returns an "Alarm: N" string when any
// qualifying blob was detected (null otherwise). Intermediate images are
// pushed to _interceptor debug views along the way.
protected override string ProcessInternal(Bitmap bitmap)
{
    using (var grayScaleImage = GrayScaleImageHelper.ToGrayScale(bitmap))
    {
        // Copy the grayscale pixel data (stride-padded) into a byte array.
        var bounds = new Rectangle(0, 0, grayScaleImage.Width, grayScaleImage.Height);
        BitmapData bitmapData = grayScaleImage.LockBits(bounds, ImageLockMode.ReadOnly, grayScaleImage.PixelFormat);
        var grayScaleHW = new byte[grayScaleImage.Height * bitmapData.Stride];
        Marshal.Copy(bitmapData.Scan0, grayScaleHW, 0, grayScaleImage.Height * bitmapData.Stride);
        var stride = bitmapData.Stride;
        grayScaleImage.UnlockBits(bitmapData);
        // Until the background model is trained, feed it frames and emit
        // debug views once it becomes operational; no detection yet.
        if (!_backgroundModel.IsOperational())
        {
            _backgroundModel.Train(grayScaleHW, grayScaleImage.Width, grayScaleImage.Height, stride);
            if (_backgroundModel.IsOperational())
            {
                _interceptor.Intercept(MinIntensityBackgroundDebugView, GrayScaleImageHelper.FromData2(_backgroundModel._width, _backgroundModel._height, _backgroundModel._stride, _backgroundModel._minIntensity));
                _interceptor.Intercept(MaxIntensityBackgroundDebugView, GrayScaleImageHelper.FromData2(_backgroundModel._width, _backgroundModel._height, _backgroundModel._stride, _backgroundModel._maxIntensity));
                _interceptor.Intercept(MaxPerFrameDifferenceDebugView, GrayScaleImageHelper.FromData2(_backgroundModel._width, _backgroundModel._height, _backgroundModel._stride, _backgroundModel._maxPerFrameDifference));
            }
            return(null);
        }
        // NOTE(review): "GetForefround" looks like a typo for "GetForeground",
        // but the method is defined elsewhere — rename there first if at all.
        var foreground = GetForefround(grayScaleHW, grayScaleImage.Width, grayScaleImage.Height, stride);
        //excluded, because does not work.
        //ExcludeShadows(grayScaleHW, foreground, grayScaleImage.Width, grayScaleImage.Height, stride);
        var temp = GrayScaleImageHelper.FromData(
            _backgroundModel._width,
            _backgroundModel._height,
            _backgroundModel._stride,
            foreground);
        int countDetected = 0;
        // create filter — closing then opening with a shared diamond mask
        var diamondMask = CreateClosingOpeningFileter();
        Closing filter = new Closing(diamondMask);
        Opening filter2 = new Opening(diamondMask);
        // apply the filter
        filter.ApplyInPlace(temp);
        filter2.ApplyInPlace(temp);
        _interceptor.Intercept(
            XXX,
            ImageHelper.ToBytes(temp));
        // blobs! — count connected components in the cleaned foreground.
        var bc = new BlobCounter
        {
            BackgroundThreshold = Color.FromArgb(254, 254, 254)
        };
        bc.ProcessImage(temp);
        Rectangle[] rects = bc.GetObjectsRectangles();
        // Keep only blobs strictly within the configured width/height limits.
        var rectanglesToDraw = new List <Rectangle>();
        foreach (Rectangle rect in rects)
        {
            if (rect.Width < _maximumDetectionWidthPixels && rect.Width > _minimumDetectionWidthPixels && rect.Height < _maximumDetectionHeightPixels && rect.Height > _minimumDetectionHeightPixels)
            {
                countDetected++;
                rectanglesToDraw.Add(rect);
            }
        }
        // NOTE(review): temp leaks if an exception is thrown above this point;
        // consider a using block.
        temp.Dispose();
        _interceptor.Intercept(InputFrameDebugView, GrayScaleImageHelper.FromData2(_backgroundModel._width, _backgroundModel._height, _backgroundModel._stride, grayScaleHW));
        _interceptor.Intercept(DifferenceDebugView, GrayScaleImageHelper.FromData2(_backgroundModel._width, _backgroundModel._height, _backgroundModel._stride, foreground));
        // Draw detections on the ORIGINAL bitmap and publish it.
        using (Graphics graphics = Graphics.FromImage(bitmap))
        using (var brush = new SolidBrush(Color.Red))
        using (var pen = new Pen(brush, 3))
        {
            if (rectanglesToDraw.Any())
            {
                graphics.DrawRectangles(pen, rectanglesToDraw.ToArray());
            }
            _interceptor.Intercept(DetectedBlobDebugView, ImageHelper.ToBytes(bitmap));
        }
        if (countDetected > 0)
        {
            return("Alarm: " + countDetected);
        }
    }
    return(null);
}