/// <summary>
/// Extension method that isolates the largest blob in the supplied bitmap.
/// </summary>
/// <param name="bitmap">Source image to process.</param>
/// <returns>A new bitmap containing only the biggest blob.</returns>
public static Bitmap BiggestBlob(this Bitmap bitmap)
{
    var filter = new ExtractBiggestBlob();
    return filter.Apply(bitmap);
}
/// <summary>
/// Extracts the biggest blob from <c>imageGot</c>, resizes it to 200x200,
/// runs <c>ProcessTwoHxTwoH</c> over the raw pixel buffer in place, and
/// shows the result in pictureBox1.
/// </summary>
private void twoHxtwoH()
{
    ExtractBiggestBlob biggestBlob = new ExtractBiggestBlob();
    imageGot = new Bitmap(imageGot);
    imageGot = biggestBlob.Apply(imageGot);
    // Normalize to a fixed 200x200 working size.
    imageGot = new Bitmap(imageGot, new Size(200, 200));
    int imgHeight = imageGot.Height;
    int imgWidth = imageGot.Width;
    var rect = new Rectangle(0, 0, imgWidth, imgHeight);
    var data = imageGot.LockBits(rect, ImageLockMode.ReadWrite, imageGot.PixelFormat);
    var depth = Bitmap.GetPixelFormatSize(data.PixelFormat) / 8; //bytes per pixel
    // NOTE(review): buffer size assumes stride == Width * depth (no row
    // padding); a 200px width is 4-byte aligned for common formats, but
    // confirm for the pixel formats actually used.
    var buffer = new byte[data.Width * data.Height * depth];
    //copy pixels to buffer
    Marshal.Copy(data.Scan0, buffer, 0, buffer.Length);
    ProcessTwoHxTwoH(buffer, 0, 0, data.Width, data.Height, data.Width, depth);
    //Copy the buffer back to image
    Marshal.Copy(buffer, 0, data.Scan0, buffer.Length);
    imageGot.UnlockBits(data);
    pictureBox1.Image = imageGot;
}
/// <summary>
/// Prepares an image for recognition: invert, grayscale (BT709), threshold,
/// keep only the biggest blob, then resize to 60x90 with bicubic interpolation.
/// </summary>
/// <param name="originalImage">Source image; a clone is processed, the original is untouched.</param>
/// <returns>The 60x90 preprocessed bitmap.</returns>
private Bitmap preProcess(Bitmap originalImage)
{
    var working = new Invert().Apply((Bitmap)originalImage.Clone());
    working = Grayscale.CommonAlgorithms.BT709.Apply(working);
    working = new Threshold().Apply(working);
    working = new ExtractBiggestBlob().Apply(working);
    working = new ResizeBicubic(60, 90).Apply(working);
    return working;
}
/// <summary>
/// Proceeds the uploaded file: deletes the previous attachment (if any),
/// extracts the biggest blob from the uploaded image and stores it as PNG.
/// </summary>
/// <param name="radFileUpload">The RAD file upload.</param>
/// <param name="fileName">Name of the previously stored file to replace, or null/empty.</param>
/// <returns>The storage path of the uploaded PNG, or an empty string when nothing was uploaded.</returns>
protected string ProceedUploadedFile(RadAsyncUpload radFileUpload, string fileName)
{
    if (radFileUpload.UploadedFiles.Count > 0)
    {
        var fsp = new FileSystemProvider();
        if (!string.IsNullOrEmpty(fileName))
        {
            try
            {
                // Remove the previously stored attachment before replacing it.
                fsp.Delete(CurrentUser.Instance.SiteID, "CompanyLegalAccounts", fileName, FileType.Attachment);
            }
            catch (Exception ex)
            {
                // Deletion failure is non-fatal; log and continue with the upload.
                Log.Error("Ошибка удаления файла", ex);
            }
        }

        // FIX: dispose the GDI+ bitmaps and the stream deterministically —
        // the original leaked unmanaged handles on every upload.
        using (var bmp = new Bitmap(radFileUpload.UploadedFiles[0].InputStream))
        {
            bmp.MakeTransparent();
            var filter = new ExtractBiggestBlob();
            using (var biggestBlobsImage = filter.Apply(bmp))
            using (var ms = new MemoryStream())
            {
                biggestBlobsImage.Save(ms, ImageFormat.Png);
                ms.Position = 0; // rewind so Upload reads from the start
                return fsp.Upload(
                    CurrentUser.Instance.SiteID,
                    "CompanyLegalAccounts",
                    Path.ChangeExtension(radFileUpload.UploadedFiles[0].FileName, "png"),
                    ms,
                    FileType.Attachment);
            }
        }
    }
    return string.Empty;
}
/// <summary>
/// Menu handler: shows the biggest blob of image <c>k</c> in pictureBox4.
/// </summary>
private void blobToolStripMenuItem_Click(object sender, EventArgs e)
{
    var extractor = new ExtractBiggestBlob();
    Bitmap biggest = extractor.Apply(k);
    pictureBox4.Image = biggest;
}
/// <summary>
/// Locates the biggest blob in <paramref name="bmp"/> and returns a square
/// region (twice the blob height on each side) positioned relative to the
/// blob; sets <c>g.eyefound</c> to indicate whether any blob was present.
/// </summary>
/// <param name="bmp">Image to search for blobs.</param>
/// <param name="g">Globals object that receives the eye-found flag.</param>
/// <returns>The computed square region (empty when no blob was found).</returns>
public Rectangle GetCenter(Bitmap bmp, Globals g)
{
    BlobCounter counter = new BlobCounter(bmp);
    int blobCount = counter.ObjectsCount;
    ExtractBiggestBlob extractor = new ExtractBiggestBlob();

    int x = 0, y = 0, h = 0;
    int width = 0, height = 0;

    if (blobCount > 0)
    {
        // Apply yields the blob image; position comes from the filter itself.
        h = extractor.Apply(bmp).Height;
        x = extractor.BlobPosition.X;
        y = extractor.BlobPosition.Y;
        width = h * 2;
        height = h * 2;
        g.eyefound = true;
    }
    else
    {
        g.eyefound = false;
    }

    var origin = new Point((x + 50) - h, (y + 50) - h);
    return new Rectangle(origin, new Size(width, height));
}
/// <summary>
/// Extracts the biggest blob of <c>k</c>, shows it in pictureBox4, and
/// rebuilds <c>fd</c>: white blob pixels are replaced by the corresponding
/// pixels of <c>resultImage</c>, everything else becomes black.
/// </summary>
private void blobToolStripMenuItem_Click(object sender, EventArgs e)
{
    ExtractBiggestBlob filter = new ExtractBiggestBlob();
    // apply the filter
    biggestBlobsImage = filter.Apply(k);
    pictureBox4.Image = biggestBlobsImage;
    IntPoint blobPosition = filter.BlobPosition;
    Console.WriteLine(blobPosition.X);
    Console.WriteLine(blobPosition.Y);
    Console.WriteLine(biggestBlobsImage.Width);
    Console.WriteLine(biggestBlobsImage.Height);
    fd = new Bitmap(biggestBlobsImage);
    for (int i = 0; i < biggestBlobsImage.Width; i++)
    {
        for (int j = 0; j < biggestBlobsImage.Height; j++)
        {
            Color hg = resultImage.GetPixel(i + blobPosition.X, j + blobPosition.Y);
            Color pixel = fd.GetPixel(i, j);
            // BUG FIX: the original used "if ((r = g = b) == 255)", which
            // ASSIGNS b to g and r and then only tests r — i.e. it checked
            // the blue channel alone. A white pixel requires all three
            // channels to equal 255.
            if (pixel.R == 255 && pixel.G == 255 && pixel.B == 255)
            {
                fd.SetPixel(i, j, hg);
            }
            else
            {
                fd.SetPixel(i, j, Color.Black);
            }
        }
    }
}
/// <summary>
/// Menu handler: replaces pictureBox2's image with its biggest blob and
/// records the blob's position in <c>blob</c>.
/// </summary>
private void extractBiggestBlobToolStripMenuItem_Click(object sender, EventArgs e)
{
    var extractor = new ExtractBiggestBlob();
    pictureBox2.Image = extractor.Apply((Bitmap)pictureBox2.Image);
    blob = extractor.BlobPosition;
}
/// <summary>
/// Extracts the largest object from the image in pictureBox5 and shows the
/// result in pictureBox3. (Translated from Indonesian: "determine the
/// biggest object".)
/// </summary>
void extractbiggestkmeans()
{
    var source = (Bitmap)pictureBox5.Image.Clone();
    var extractor = new ExtractBiggestBlob();
    pictureBox3.Image = extractor.Apply(source);
}
/// <summary>
/// Clones <paramref name="Im"/> into 24bpp RGB (the format AForge blob
/// filters expect) and returns its biggest blob.
/// </summary>
/// <param name="Im">Source image.</param>
/// <returns>Bitmap containing only the biggest blob.</returns>
public Bitmap ToExtractBiggestBlob(Bitmap Im)
{
    var extractor = new AForge.Imaging.Filters.ExtractBiggestBlob();
    Bitmap source24 = AForge.Imaging.Image.Clone(new Bitmap(Im), PixelFormat.Format24bppRgb);
    return extractor.Apply(source24);
}
/// <summary>
/// Button handler: keeps only the biggest blob of the image shown in
/// pictureBox1. (Comments translated from Turkish: "filter created" /
/// "filter applied".)
/// </summary>
private void deleteedges_Click(object sender, EventArgs e)
{
    // Create and apply the biggest-blob filter, then display the result.
    var filter = new ExtractBiggestBlob();
    Bitmap cleaned = filter.Apply((Bitmap)pictureBox1.Image);
    pictureBox1.Image = cleaned;
}
/// <summary>
/// Extracts the biggest object from the original image in pictureBox10,
/// displays it in pictureBox9 and saves it to disk. (Translated from
/// Indonesian: "determine the biggest object from the original object".)
/// </summary>
void extractbiggest()
{
    var source = (Bitmap)pictureBox10.Image.Clone();
    var filter = new ExtractBiggestBlob();
    Bitmap extracted = filter.Apply(new Bitmap(source));
    pictureBox9.BackColor = Color.Empty;
    pictureBox9.Image = extracted;
    pictureBox9.Image.Save(@"E:\Gambar\Gambar_ExctractBiggest" + "\\" + jumlahdt.ToString() + ".bmp");
}
/// <summary>
/// Determines whether card is a face card (Jack, Queen, King) or not.
/// After preprocessing, a face card carries one large decorated blob, so its
/// biggest blob is tall relative to the card; a number card's blobs are all
/// comparatively small.
/// NOTE(review): the original comments claimed the blob WIDTH was compared
/// to half the card width, but the code actually compares the blob HEIGHT to
/// one fifth of the card height — the documentation here follows the code.
/// </summary>
/// <param name="bmp">Card image to be analyzed</param>
/// <returns>True if it is a face card, false if not</returns>
private bool IsFaceCard(Bitmap bmp)
{
    Bitmap temp = this.commonSeq.Apply(bmp);
    ExtractBiggestBlob extractor = new ExtractBiggestBlob();
    temp = extractor.Apply(temp); //Extract biggest blob
    if (temp.Height > bmp.Height / 5) //Blob taller than 1/5 of the card height
    {
        return(true);  //Its a face card
    }
    return(false);     //It is not a face card
}
/// <summary>
/// AForge background-subtraction demo: subtracts "Picture 70" from
/// "Picture 71", extracts the biggest remaining blob and saves it as JPEG.
/// See http://www.aforgenet.com/framework/docs/html/2d04f587-3272-2ad5-f8bc-54ff407d41f2.htm
/// </summary>
public void SubtractAForge()
{
    Bitmap background = (Bitmap)Bitmap.FromFile("C:\\Picture 70.jpg");
    var subtract = new Subtract(background);

    Bitmap sourceImage = (Bitmap)Bitmap.FromFile("C:\\Picture 71.jpg");
    Bitmap difference = subtract.Apply(sourceImage);

    var blobFilter = new ExtractBiggestBlob();
    Bitmap biggest = blobFilter.Apply(difference);
    var originalImage = blobFilter.OriginalImage;
    var blobPosition = blobFilter.BlobPosition;

    biggest.Save("C:\\Picture 71-70 Result.jpg", ImageFormat.Jpeg);
}
/// <summary>
/// Filters <c>imageGot</c> so that only blobs at least as large as its
/// biggest blob survive, then displays the result in pictureBox1.
/// </summary>
private void extractLargestBlob()
{
    ExtractBiggestBlob biggestBlob = new ExtractBiggestBlob();
    Bitmap newImage = biggestBlob.Apply(imageGot);
    int newImHeight = newImage.Height;
    int newImWidth = newImage.Width;

    // Keep only blobs whose bounding box matches the biggest one.
    BlobsFiltering filter = new BlobsFiltering();
    filter.CoupledSizeFiltering = true;
    // BUG FIX: the original assigned MinWidth = newImHeight and
    // MinHeight = newImWidth (dimensions transposed), which used the wrong
    // thresholds for any non-square blob.
    filter.MinWidth = newImWidth;
    filter.MinHeight = newImHeight;
    imageGot = filter.Apply(imageGot);
    pictureBox1.Image = imageGot;
}
// Finds the biggest area of one color
/// <summary>
/// Crops <paramref name="bitmap"/> to the bounding box of its biggest blob.
/// Falls back to the untouched input if blob extraction or cropping fails.
/// </summary>
/// <param name="bitmap">Source image.</param>
/// <returns>The cropped image, or the original on failure.</returns>
public Bitmap BiggestBlob(Bitmap bitmap)
{
    try
    {
        var filter = new ExtractBiggestBlob();
        Bitmap blob = filter.Apply(bitmap);
        IntPoint position = filter.BlobPosition;
        var cropArea = new Rectangle(position.X, position.Y, blob.Width, blob.Height);
        return CropImage(bitmap, cropArea);
    }
    catch
    {
        // Deliberate best-effort: any failure returns the original image.
        return bitmap;
    }
}
/// <summary>
/// Extracts the suit image from the area below the rank (number) region.
/// (Translated from Chinese: "get the suit image based on the number image
/// region".)
/// </summary>
/// <param name="src">Source card image.</param>
/// <param name="rectNum">Bounding rectangle of the rank/number area within <paramref name="src"/>.</param>
/// <returns>The resized 24bpp image of the largest suit blob.</returns>
private Bitmap GetColorBitmap(Bitmap src, Rectangle rectNum)
{
    // Region directly below the number area, down to the bottom of the card.
    var colorRect = new Rectangle(rectNum.X, rectNum.Y + rectNum.Height, rectNum.Width, src.Height - rectNum.Height - rectNum.Y);
    // Preprocess the suit sub-image.
    var bmpColor = _seq.Apply(src.Clone(colorRect, src.PixelFormat));
    // Invert the palette (swap index 0 and 255).
    ExchangeIndexColor(bmpColor, 0, 255);
    // Keep only the biggest blob — assumed to be the suit mark.
    var maxBlobBmp = _extractBiggestBlob.Apply(bmpColor);
    // A 24bpp result is required, so clone the matching area from the original image.
    colorRect = new Rectangle(colorRect.X + _extractBiggestBlob.BlobPosition.X, colorRect.Y + _extractBiggestBlob.BlobPosition.Y, maxBlobBmp.Width, maxBlobBmp.Height);
    return(_resizeColorFilter.Apply(src.Clone(colorRect, PixelFormat.Format24bppRgb)));
}
/// <summary>
/// Replaces the image in pictureBox1 with its biggest blob; shows a message
/// box on failure instead of letting the UI crash.
/// </summary>
private void extractBiggestBlob()
{
    try
    {
        // Work on a copy of the displayed image.
        var source = new Bitmap(pictureBox1.Image);
        var filter = new ExtractBiggestBlob();
        pictureBox1.Image = filter.Apply(source);
    }
    catch (Exception exc)
    {
        MessageBox.Show(exc.Message);
    }
}
/// <summary>
/// Extension method: preprocesses <paramref name="image1"/> with grayscale
/// (BT709), Bradley local thresholding and difference edge detection, then
/// extracts its biggest blob.
/// </summary>
/// <param name="image1">Source image.</param>
/// <param name="rec">Receives the blob's bounding rectangle in source coordinates.</param>
/// <returns>Bitmap containing only the biggest blob.</returns>
public static Bitmap ExtractBiggestBlob(this Bitmap image1, ref Rectangle rec)
{
    // NOTE(review): locking on typeof(...) is a known anti-pattern — any code
    // anywhere can lock the same Type object and deadlock; a private static
    // readonly object field would be safer. Also, every variable below is
    // local, so it is unclear what this lock actually protects — confirm
    // before changing.
    lock (typeof(Lock_ExtractBiggestBlob))
    {
        FiltersSequence commonSeq = new FiltersSequence();
        commonSeq.Add(Grayscale.CommonAlgorithms.BT709);
        commonSeq.Add(new BradleyLocalThresholding());
        commonSeq.Add(new DifferenceEdgeDetector());
        using (Bitmap temp = commonSeq.Apply(image1))
        {
            ExtractBiggestBlob extractor = new ExtractBiggestBlob();
            var img = extractor.Apply(temp);
            rec = new Rectangle(extractor.BlobPosition.X, extractor.BlobPosition.Y, img.Width, img.Height);
            return(img);
        }
    }
}
/// <summary>
/// Detects whether a QR-code region exists in the ticket image.
/// (Translated from Chinese: "check whether a QR-code area exists".)
/// </summary>
/// <param name="source">Full ticket image.</param>
/// <param name="ticketType">Ticket type; <c>Config.BLUE_TICKET</c> selects the blue-ticket preprocessing sequence, anything else the red one.</param>
/// <returns>Tuple of (found, cut rectangle, cropped QR bitmap); (false, empty rectangle, null) when no QR code is found.</returns>
public Tuple <bool, Rectangle, Bitmap> IsExistQRCode(Bitmap source, int ticketType)
{
    // Expected QR-code dimensions for this ticket type.
    // NOTE(review): qrCodeW/qrCodeH are computed but never used below —
    // confirm whether they were meant to validate qr.Width/qr.Height.
    int qrCodeW = 0, qrCodeH = 0;
    Bitmap temp;
    if (Config.BLUE_TICKET == ticketType)
    {
        qrCodeW = Convert.ToInt16(Config.BLUE_QRCODE_W_COPR_RATIO * Config.BLUE_TICKET_WIDTH);
        qrCodeH = Convert.ToInt16(Config.BLUE_QRCODE_H_COPR_RATIO * Config.BLUE_TICKET_HEIGHT);
        temp = extractQRCodeSeqForBlueTicket.Apply(source);
    }
    else
    {
        qrCodeW = Convert.ToInt16(Config.RED_QRCODE_W_COPR_RATIO * Config.RED_TICKET_WIDTH);
        qrCodeH = Convert.ToInt16(Config.RED_QRCODE_H_COPR_RATIO * Config.RED_TICKET_HEIGHT);
        temp = extractQRCodeSeqForRedTicket.Apply(source);
    }
    // The QR code is assumed to be the biggest blob of the preprocessed image.
    ExtractBiggestBlob ebb = new ExtractBiggestBlob();
    Bitmap qr = ebb.Apply(temp);
    // Blobs smaller than 100x100 are rejected as noise.
    if (qr.Width >= 100 && qr.Height >= 100)
    {
        Rectangle rect = cutQRCodeArea(qr);
        // Translate back to source coordinates, with a 3px margin on every side.
        Rectangle rectCut = new Rectangle();
        rectCut.X = ebb.BlobPosition.X + rect.X - 3;
        rectCut.Y = ebb.BlobPosition.Y + rect.Y - 3;
        rectCut.Width = rect.Width + 6;
        rectCut.Height = rect.Height + 6;
        Crop c = new Crop(rectCut);
        Bitmap qrcode = c.Apply(source);
        return(Tuple.Create <bool, Rectangle, Bitmap>(true, rectCut, qrcode));
    }
    return(Tuple.Create <bool, Rectangle, Bitmap>(false, new Rectangle(), null));
}
/// <summary>
/// Scans and returns the suit of a card. NOTE: only for cards that are not
/// face cards. The suit is recognized from the card's colour and the size of
/// the extracted suit blob.
/// </summary>
/// <param name="suitBmp">Suit image to be scanned</param>
/// <param name="color">Color of card. 'R' means Red, 'B' means black</param>
/// <returns>Scanned suit, or Suit.NOT_RECOGNIZED for an unknown colour.</returns>
private Suit ScanSuit(Bitmap suitBmp, char color)
{
    Bitmap temp = commonSeq.Apply(suitBmp);
    var extractor = new ExtractBiggestBlob();
    temp = extractor.Apply(temp); // the biggest blob is the suit mark

    Suit suit = Suit.NOT_RECOGNIZED;
    switch (color)
    {
        case 'B':
            // Narrow black blobs are spades; wider ones are clubs.
            suit = temp.Width <= 45 ? Suit.Spades : Suit.Clubs;
            break;
        case 'R':
            // Tall red blobs are diamonds; shorter ones are hearts.
            suit = temp.Height >= 53 ? Suit.Diamonds : Suit.Hearts;
            break;
    }
    return suit;
}
/// <summary>
/// Segments the loaded image: grayscale + threshold, scans columns for gaps
/// to cut vertical strips, extracts the biggest blob of the last strip, then
/// repeats the scan along rows and extracts the biggest blob of the last
/// horizontal strip.
/// </summary>
private void segmentationToolStripMenuItem_Click(object sender, EventArgs e)
{
    // Grayscale then binarize the source image.
    Grayscale grayscale = new GrayscaleBT709();
    grayimage = grayscale.Apply(original);
    pictureBox2.Image = grayimage;
    Threshold bw = new Threshold();
    thresholdimage = bw.Apply(grayimage);
    pictureBox2.Image = thresholdimage;
    // Collect the x-coordinate of each new run of columns that contain black pixels.
    List <int> xC = new List <int> ();
    int xPrev = 0;
    for (int i = 0; i < thresholdimage.Width; i++)
    {
        count = 0;
        for (int j = 0; j < thresholdimage.Height; j++)
        {
            Color p = thresholdimage.GetPixel(i, j);
            if (p.R == 0 && p.G == 0 && p.B == 0)
            {
                count++;
            }
        }
        if (count != 0)
        {
            if (xC.Count == 0)
            {
                xC.Add(i);
            }
            else
            {
                // A gap of more than one empty column starts a new segment.
                if (i - xPrev > 1)
                {
                    xC.Add(i);
                }
            }
            xPrev = i;
        }
    }
    // Cut one vertical strip per detected segment start.
    List <Bitmap> ni = new List <Bitmap>();
    for (int i = 0; i < xC.Count; i++)
    {
        int endbound;
        if (i + 1 >= xC.Count)
        {
            endbound = thresholdimage.Width;
        }
        else
        {
            endbound = xC[i + 1];
        }
        Crop cr = new Crop(new Rectangle(xC[i], 0, endbound - xC[i], thresholdimage.Height));
        newImage = cr.Apply(thresholdimage);
        ni.Add(newImage);
        pictureBox4.Image = newImage;
    }
    // Biggest blob of the LAST vertical strip: invert so the glyph is white
    // for blob extraction, then invert back for display.
    Invert inv = new Invert();
    invertimage = inv.Apply(newImage);
    pictureBox5.Image = invertimage;
    ExtractBiggestBlob ebb = new ExtractBiggestBlob();
    blobimage = ebb.Apply(invertimage);
    pictureBox5.Image = blobimage;
    Invert inv1 = new Invert();
    invertimage1 = inv1.Apply(blobimage);
    pictureBox5.Image = invertimage1;
    // Same scan along rows.
    // NOTE(review): unlike the column scan above, the "if (count != 0)" block
    // here sits INSIDE the inner x-loop, so row candidates are evaluated as
    // soon as the first black pixel of a row is seen — confirm whether this
    // asymmetry is intentional.
    List <int> yC = new List <int>();
    int xPrev1 = 0;
    for (int j = 0; j < thresholdimage.Height; j++)
    {
        count = 0;
        for (int i = 0; i < thresholdimage.Width; i++)
        {
            Color p = thresholdimage.GetPixel(i, j);
            if (p.R == 0 && p.G == 0 && p.B == 0)
            {
                count++;
            }
            if (count != 0)
            {
                if (yC.Count == 0)
                {
                    yC.Add(j);
                }
                else
                {
                    if (j - xPrev1 > 1)
                    {
                        yC.Add(j);
                    }
                }
                xPrev1 = j;
            }
        }
    }
    // Cut one horizontal strip per detected segment start.
    // NOTE(review): the crop WIDTH below is thresholdimage.Height — this
    // looks like it was meant to be thresholdimage.Width; confirm.
    List <Bitmap> ni1 = new List <Bitmap>();
    for (int j = 0; j < yC.Count; j++)
    {
        int endbound;
        if (j + 1 >= yC.Count)
        {
            endbound = thresholdimage.Height;
        }
        else
        {
            endbound = yC[j + 1];
        }
        Crop cr = new Crop(new Rectangle(0, yC[j], thresholdimage.Height, endbound - yC[j]));
        newImage3 = cr.Apply(thresholdimage);
        ni1.Add(newImage3);
        pictureBox3.Image = newImage3;
    }
    // Biggest blob of the LAST horizontal strip, same invert/extract/invert dance.
    Invert inv2 = new Invert();
    invertimage3 = inv2.Apply(newImage3);
    pictureBox6.Image = invertimage3;
    ExtractBiggestBlob ebb1 = new ExtractBiggestBlob();
    blobimage1 = ebb1.Apply(invertimage3);
    pictureBox6.Image = blobimage1;
    Invert inv3 = new Invert();
    invertimage2 = inv3.Apply(blobimage1);
    pictureBox6.Image = invertimage2;
}
/// <summary>
/// Isolates the palm/hand region of <paramref name="originalImage"/> via
/// skin-colour masking and biggest-blob cropping, then returns a 200x200
/// thresholded Canny edge image of it.
/// </summary>
/// <param name="originalImage">Input photo containing a hand.</param>
/// <returns>200x200 binary edge image of the hand region.</returns>
public Bitmap Apply(Bitmap originalImage)
{
    //reduce image size so that less, bicubic resizes with less breakage
    ResizeBicubic resizeObject = new ResizeBicubic(200, 200);
    Bitmap smallOriginalImage = resizeObject.Apply(originalImage);
    Bitmap copiedImage = (Bitmap)smallOriginalImage.Clone();
    // Black out every pixel that isSkin() does not classify as skin.
    for (int x = 0; x < smallOriginalImage.Width; x++)
    {
        for (int y = 0; y < smallOriginalImage.Height; y++)
        {
            if (!isSkin(copiedImage.GetPixel(x, y)))
            {
                copiedImage.SetPixel(x, y, Color.Black);
            }
        }
    }
    copiedImage = Grayscale.CommonAlgorithms.BT709.Apply(copiedImage);
    Threshold bwObj = new Threshold(50);
    copiedImage = bwObj.Apply(copiedImage);
    //applying closing to remove small black spots(closing holes in the image) i.e dilusion followed by erosion
    AForge.Imaging.Filters.Closing filter = new Closing();
    copiedImage = filter.Apply(copiedImage);
    //extracting the biggest blob to get only the palm; the result is the
    //smallest bounding box containing it
    ExtractBiggestBlob biggestblobObject = new ExtractBiggestBlob();
    copiedImage = biggestblobObject.Apply(copiedImage);
    //we need the coordinates of that bounding box
    IntPoint point = biggestblobObject.BlobPosition;
    //create a rectangle to pass to the crop class, it takes x,y,height,width
    Rectangle rect = new Rectangle(point.X, point.Y, copiedImage.Width, copiedImage.Height);
    Crop cropObject = new Crop(rect);
    //crop from the (noisy) resized original so the real colours are kept
    Bitmap croppedImage = cropObject.Apply(smallOriginalImage);
    //masking: AND the crop with the blob mask so the background between the
    //fingers is blacked out too
    for (int x = 0; x < copiedImage.Width; x++)
    {
        for (int y = 0; y < copiedImage.Height; y++)
        {
            Color c = copiedImage.GetPixel(x, y);
            if (c.R == 0 && c.G == 0 && c.B == 0)
            {
                croppedImage.SetPixel(x, y, Color.Black);
            }
        }
    }
    //per-pixel masking is slow on large images, which is why everything runs
    //on the reduced smallOriginalImage; finally normalize to a standard size
    croppedImage = resizeObject.Apply(croppedImage);
    croppedImage = Grayscale.CommonAlgorithms.BT709.Apply(croppedImage);
    CannyEdgeDetector cannyObj = new CannyEdgeDetector(0, 0, 1.4);
    croppedImage = cannyObj.Apply(croppedImage);
    Threshold thresObj = new Threshold(20);
    croppedImage = thresObj.Apply(croppedImage);
    return(croppedImage);
}
/// <summary>
/// Batch feature extraction for sign-language training data: for every image
/// of every letter folder, detects skin pixels, masks them against the
/// biggest blob, computes Canny edges, builds a 36-bin normalized
/// edge-density vector (6x6 grid over 300x300), saves the segmented image
/// and appends the labelled vectors to segmented.txt.
/// </summary>
private void skinDetectToolStripMenuItem_Click(object sender, EventArgs e)
{
    List <List <double> > Listofvectors = new List <List <double> >();
    System.IO.StreamWriter file = new System.IO.StreamWriter(@"C:\Users\gsrip\Documents\MyDocuments\Saarthi AI and IP\Segmented\segmented.txt", append: true);
    String alphabets = "0ABCDEFGHIJKLMNOPQRSTUVWXYZ";
    for (int index = 1; index <= 26; index++)
    {
        //for each folder select all filenames
        filenames = Directory.GetFiles(dirnames[index - 1]);
        int n = 0;
        foreach (string filename in filenames)
        {
            //load an image into a bitmap and normalize it to 300x300
            Bitmap bmplocal = new Bitmap(filename);
            int height = 300, width = 300;
            bmp = new Bitmap(bmplocal, width, height);
            pictureBox1.Image = new Bitmap(bmp);
            pictureBox1.SizeMode = PictureBoxSizeMode.StretchImage;
            using (bmp)
            using (skinBmp = new Bitmap(bmp.Width, bmp.Height))
            {
                //skin detection: copy only pixels matching an RGB skin heuristic
                for (int x = 0; x < bmp.Width; x++)
                {
                    for (int y = 0; y < bmp.Height; y++)
                    {
                        Color pixel = bmp.GetPixel(x, y);
                        int red = pixel.R;
                        int blue = pixel.B;
                        int green = pixel.G;
                        int max = Math.Max(red, Math.Max(green, blue));
                        int min = Math.Min(red, Math.Min(green, blue));
                        int rgdif = red - green;
                        int abs = Math.Abs(rgdif);
                        if (red > 95 && green > 40 && blue > 20 && max - min > 15 && abs > 15 && red > green && red > blue)
                        {
                            skinBmp.SetPixel(x, y, pixel);
                        }
                    }
                }
                pictureBox2.Image = new Bitmap(skinBmp);
                //grayscale filter (BT709) + threshold + closing + opening on the ORIGINAL image
                Grayscale filter1 = new Grayscale(0.2125, 0.7154, 0.0721);
                Bitmap newImage = new Bitmap(bmp);
                Bitmap grayImage = filter1.Apply(newImage);
                Threshold filter2 = new Threshold(100);
                Bitmap bwImage = filter2.Apply(grayImage);
                Closing filter5 = new Closing();
                filter5.ApplyInPlace(bwImage);
                Opening filter3 = new Opening();
                filter3.ApplyInPlace(bwImage);
                //biggest blob of the cleaned binary image, and of the skin image
                ExtractBiggestBlob filter4 = new ExtractBiggestBlob();
                Bitmap biggestBlobsImage = filter4.Apply(bwImage);
                ExtractBiggestBlob filter6 = new ExtractBiggestBlob();
                Bitmap biggestBlobsImage1 = filter6.Apply((Bitmap)pictureBox2.Image);
                Bitmap orgimage = new Bitmap(biggestBlobsImage1, 300, 300);
                Bitmap blobimage = new Bitmap(biggestBlobsImage, 300, 300);
                Bitmap newimage = new Bitmap(300, 300);
                //AND the two images channel-wise so only pixels present in both survive
                for (int x = 0; x < 300; x++)
                {
                    for (int y = 0; y < 300; y++)
                    {
                        Color pixel1 = orgimage.GetPixel(x, y);
                        Color pixel2 = blobimage.GetPixel(x, y);
                        int red1 = pixel1.R, red2 = pixel2.R;
                        int blue1 = pixel1.B, blue2 = pixel2.B;
                        int green1 = pixel1.G, green2 = pixel2.G;
                        int newred, newblue, newgreen;
                        newred = red1 & red2;
                        newblue = blue1 & blue2;
                        newgreen = green1 & green2;
                        Color newpixel = Color.FromArgb(newred, newgreen, newblue);
                        newimage.SetPixel(x, y, newpixel);
                    }
                }
                //Canny edges on the grayscaled AND-image
                CannyEdgeDetector filter7 = new CannyEdgeDetector();
                Grayscale filter = new Grayscale(0.2125, 0.7154, 0.0721);
                Bitmap edges = filter.Apply(newimage);
                filter7.ApplyInPlace(edges);
                pictureBox3.Image = new Bitmap(edges);
                //save the segmented image under its letter folder
                String location = "C:\\Users\\gsrip\\Documents\\MyDocuments\\Saarthi AI and IP\\Segmented\\";
                location = location + alphabets[index].ToString() + "\\image";
                newimage.Save(@location + (n++).ToString() + ".jpg");
                //feature vector: edge-pixel count per 50x50 cell (6x6 grid)
                List <int> featureVector = new List <int>();
                for (int i = 0; i < 6; i++)
                {
                    for (int j = 0; j < 6; j++)
                    {
                        int count = 0;
                        for (int x = i * 50; x < (i * 50) + 50; x++)
                        {
                            for (int y = j * 50; y < (j * 50) + 50; y++)
                            {
                                Color pixel = edges.GetPixel(x, y);
                                if (pixel.R != 0 && pixel.G != 0 && pixel.B != 0)
                                {
                                    count++;
                                }
                            }
                        }
                        featureVector.Add(count);
                    }
                }
                //L1-normalize the vector
                int sumofvector = featureVector.Sum();
                List <double> featureVectorNorm = new List <double>();
                foreach (var d in featureVector)
                {
                    featureVectorNorm.Add((double)d / sumofvector);
                }
                Listofvectors.Add(featureVectorNorm);
            }//end of using
        } // end of foreach filename
        //write every collected vector, labelled with the current letter index
        //NOTE(review): Listofvectors is never cleared between letters, so
        //vectors from earlier letters are re-written under each later index —
        //confirm whether it should be reset per index.
        foreach (var vector in Listofvectors)
        {
            String line = index.ToString() + ": ";
            foreach (var obj in vector)
            {
                line = line + obj.ToString() + " ";
            }
            file.WriteLine(line);
        }
    } //end of foreach index
    file.Close();
} //end of skindetect tool strip
/// <summary>
/// Full gesture-detection pipeline for the current video frame: skin
/// segmentation (quadrant-parallel), morphological noise removal,
/// biggest-blob extraction, merge with the captured frame, resize, Sobel
/// edges, HOEF feature extraction (6x6 grid), then either appends the
/// labelled vector to the SVM training set or classifies it and speaks the
/// result.
/// </summary>
private void Detection()
{
    var watch = System.Diagnostics.Stopwatch.StartNew();
    if (Video.Image != null)
    {
        // Mode 0 = training: derive the class label from the alphabet list.
        if (ModeList.selectedIndex == 0)
        {
            training = 1;
            int prev = AlphabetList.selectedIndex;
            if (AlphabetList.selectedIndex == 26 || prev == 26)
            {
                // NOTE(review): 67 is a special label for the 27th list entry —
                // confirm its meaning against the training data.
                label = 67;
            }
            else if (AlphabetList.selectedIndex == -1)
            {
                label = prev;
            }
            else
            {
                label = AlphabetList.selectedIndex;
            }
        }
        else
        {
            training = 0;
        }
        ProgressBar.Visible = true;
        ProgressBar.Value = 0;
        ProgressBar.Maximum_Value = 9;
        ProgressBar.Value += 1;
        CapturedBox.Image = (Bitmap)Video.Image.Clone();
        Bitmap src = new Bitmap(CapturedBox.Image);
        //skin detection: process the four quadrants of the raw buffer in parallel
        var image = new Rectangle(0, 0, src.Width, src.Height);
        var value = src.LockBits(image, ImageLockMode.ReadWrite, src.PixelFormat);
        var size = Bitmap.GetPixelFormatSize(value.PixelFormat) / 8; // bytes per pixel
        // NOTE(review): buffer size assumes stride == Width * size (no row padding) — confirm.
        var buffer = new byte[value.Width * value.Height * size];
        Marshal.Copy(value.Scan0, buffer, 0, buffer.Length);
        System.Threading.Tasks.Parallel.Invoke(
            () => { Skin_process(buffer, 0, 0, value.Width / 2, value.Height / 2, value.Width, size); },
            () => { Skin_process(buffer, 0, value.Height / 2, value.Width / 2, value.Height, value.Width, size); },
            () => { Skin_process(buffer, value.Width / 2, 0, value.Width, value.Height / 2, value.Width, size); },
            () => { Skin_process(buffer, value.Width / 2, value.Height / 2, value.Width, value.Height, value.Width, size); }
        );
        Marshal.Copy(buffer, 0, value.Scan0, buffer.Length);
        src.UnlockBits(value);
        SkinBox.Image = src;
        if (Skin == 1)
        {
            ProgressBar.Value += 1;
            //Dilation & Erosion: 10 rounds each (morphological closing) to remove noise
            src = Grayscale.CommonAlgorithms.BT709.Apply(src);
            BinaryDilation3x3 dilatation = new BinaryDilation3x3();
            BinaryErosion3x3 erosion = new BinaryErosion3x3();
            for (int a = 1; a <= 10; a++)
            {
                src = dilatation.Apply(src);
            }
            for (int a = 1; a <= 10; a++)
            {
                src = erosion.Apply(src);
            }
            ProgressBar.Value += 1;
            NoiseBox.Image = src;
            //Blob: the biggest blob is taken as the hand region; x/y record its offset
            try
            {
                ExtractBiggestBlob blob = new ExtractBiggestBlob();
                src = blob.Apply(src);
                x = blob.BlobPosition.X;
                y = blob.BlobPosition.Y;
                ProgressBar.Value += 1;
            }
            catch
            {
                // Blob extraction failed (e.g. empty mask); processing
                // continues below with the un-extracted image and stale x/y.
                this.Show();
            }
            //Merge: copy the matching region of the captured frame into the blob image, quadrant-parallel
            Bitmap srcImage = new Bitmap(CapturedBox.Image);
            Bitmap dstImage = new Bitmap(src);
            var srcrect = new Rectangle(0, 0, srcImage.Width, srcImage.Height);
            var dstrect = new Rectangle(0, 0, dstImage.Width, dstImage.Height);
            var srcdata = srcImage.LockBits(srcrect, ImageLockMode.ReadWrite, srcImage.PixelFormat);
            var dstdata = dstImage.LockBits(dstrect, ImageLockMode.ReadWrite, dstImage.PixelFormat);
            var srcdepth = Bitmap.GetPixelFormatSize(srcdata.PixelFormat) / 8;
            var dstdepth = Bitmap.GetPixelFormatSize(dstdata.PixelFormat) / 8; //bytes per pixel
            var srcbuffer = new byte[srcdata.Width * srcdata.Height * srcdepth];
            var dstbuffer = new byte[dstdata.Width * dstdata.Height * dstdepth];
            //copy pixels to buffer
            Marshal.Copy(srcdata.Scan0, srcbuffer, 0, srcbuffer.Length);
            Marshal.Copy(dstdata.Scan0, dstbuffer, 0, dstbuffer.Length);
            System.Threading.Tasks.Parallel.Invoke(
                () =>
                {
                    //upper-left
                    Merge_process(srcbuffer, dstbuffer, x, 0, y, 0, x + (dstdata.Width / 2), dstdata.Width / 2, y + (dstdata.Height / 2), dstdata.Height / 2, srcdata.Width, dstdata.Width, srcdepth, dstdepth);
                },
                () =>
                {
                    //upper-right
                    Merge_process(srcbuffer, dstbuffer, x + (dstdata.Width / 2), dstdata.Width / 2, y, 0, x + (dstdata.Width), dstdata.Width, y + (dstdata.Height / 2), dstdata.Height / 2, srcdata.Width, dstdata.Width, srcdepth, dstdepth);
                },
                () =>
                {
                    //lower-left
                    Merge_process(srcbuffer, dstbuffer, x, 0, y + (dstdata.Height / 2), dstdata.Height / 2, x + (dstdata.Width / 2), dstdata.Width / 2, y + (dstdata.Height), dstdata.Height, srcdata.Width, dstdata.Width, srcdepth, dstdepth);
                },
                () =>
                {
                    //lower-right
                    Merge_process(srcbuffer, dstbuffer, x + (dstdata.Width / 2), dstdata.Width / 2, y + (dstdata.Height / 2), dstdata.Height / 2, x + (dstdata.Width), dstdata.Width, y + (dstdata.Height), dstdata.Height, srcdata.Width, dstdata.Width, srcdepth, dstdepth);
                }
            );
            //Copy the buffer back to image
            Marshal.Copy(srcbuffer, 0, srcdata.Scan0, srcbuffer.Length);
            Marshal.Copy(dstbuffer, 0, dstdata.Scan0, dstbuffer.Length);
            srcImage.UnlockBits(srcdata);
            dstImage.UnlockBits(dstdata);
            src = dstImage;
            ProgressBar.Value += 1;
            CropBox.Image = src;
            //Resize to the fixed 200x200 feature frame
            ResizeBilinear resize = new ResizeBilinear(200, 200);
            src = resize.Apply(src);
            ProgressBar.Value += 1;
            //Edges: Sobel on grayscale
            src = Grayscale.CommonAlgorithms.BT709.Apply((Bitmap)src);
            SobelEdgeDetector edges = new SobelEdgeDetector();
            src = edges.Apply(src);
            ProgressBar.Value += 1;
            EdgeDetectorBox.Image = src;
            //HOEF: count edge pixels per cell of a 6x6 grid, then L1-normalize into 36 bins
            Bitmap block = new Bitmap(src);
            int[] edgescount = new int[50];
            double[] norm = new double[200];
            String text = null;
            int sum = 0;
            int z = 1;
            for (int p = 1; p <= 6; p++)
            {
                for (int q = 1; q <= 6; q++)
                {
                    for (int x = (p - 1) * block.Width / 6; x < (p * block.Width / 6); x++)
                    {
                        for (int y = (q - 1) * block.Height / 6; y < (q * block.Height / 6); y++)
                        {
                            Color colorPixel = block.GetPixel(x, y);
                            int r = colorPixel.R;
                            int g = colorPixel.G;
                            int b = colorPixel.B;
                            if (r != 0 & g != 0 & b != 0)
                            {
                                edgescount[z]++;
                            }
                        }
                    }
                    z++;
                }
            }
            for (z = 1; z <= 36; z++)
            {
                sum = sum + edgescount[z];
            }
            for (z = 1; z <= 36; z++)
            {
                norm[z] = (double)edgescount[z] / sum;
                text = text + " " + z.ToString() + ":" + norm[z].ToString();
            }
            if (training == 1)
            {
                //training mode: append the labelled feature vector
                File.AppendAllText(@"D:\train.txt", label.ToString() + text + Environment.NewLine);
                ProgressBar.Value += 1;
            }
            else
            {
                //recognition mode: write the test vector, train an SVM on the stored set, predict
                File.WriteAllText(@"D:\test.txt", label.ToString() + text + Environment.NewLine);
                ProgressBar.Value += 1;
                //SVM
                Problem train = Problem.Read(@"D:\train.txt");
                Problem test = Problem.Read(@"D:\test.txt");
                Parameter parameter = new Parameter()
                {
                    C = 32, Gamma = 8
                };
                Model model = Training.Train(train, parameter);
                Prediction.Predict(test, @"D:\result.txt", model, false);
                int value1 = Convert.ToInt32(File.ReadAllText(@"D:\result.txt"));
                //map the predicted class to display text (27-29 are phrases, otherwise A-Z)
                String alphabet = null;
                if (value1 == 27)
                {
                    alphabet += "Welcome ";
                }
                else if (value1 == 28)
                {
                    alphabet += "Good Morning";
                }
                else if (value1 == 29)
                {
                    alphabet += "Thank You";
                }
                else
                {
                    alphabet += (char)(65 + value1);
                }
                OutputText.Text = alphabet;
                //speak the recognized text
                SpeechSynthesizer speechSynthesizer = new SpeechSynthesizer();
                speechSynthesizer.SetOutputToDefaultAudioDevice();
                speechSynthesizer.Volume = 100;
                speechSynthesizer.Rate = -2;
                speechSynthesizer.SelectVoiceByHints(VoiceGender.Female, VoiceAge.Child);
                speechSynthesizer.SpeakAsync(alphabet);
                if (alphabet == " ")
                {
                    speechSynthesizer.SpeakAsync(OutputText.Text);
                }
                ProgressBar.Value += 1;
            }
        }
        else
        {
            this.Show();
        }
        watch.Stop();
        var time = (watch.ElapsedMilliseconds);
        float secs = (float)time / 1000;
        ExecutionTimeBox.Text = Convert.ToString(secs) + " " + "Seconds";
    }
}
/// <summary>
/// Captures the current camera frame, segments skin (quadrant-parallel),
/// cleans the mask, extracts the biggest blob (the hand), merges the
/// corresponding frame region back in, computes Canny edges, builds a 64-bin
/// HOEF vector (8x8 grid) and classifies it with an SVM; the predicted
/// letter is shown in label1.
/// </summary>
private void button2_Click(object sender, EventArgs e)
{
    // Grab the current frame if the camera feed is running.
    if (FinalFrame.IsRunning == true)
    {
        pictureBox2.Image = (Bitmap)pictureBox1.Image.Clone();
    }
    Bitmap InputImage = (Bitmap)pictureBox2.Image;
    // Skin segmentation over the raw pixel buffer, four quadrants in parallel.
    Rectangle Tile = new Rectangle(0, 0, InputImage.Width, InputImage.Height);
    BitmapData bitmapdata = InputImage.LockBits(Tile, ImageLockMode.ReadWrite, InputImage.PixelFormat);
    int formatsize = Bitmap.GetPixelFormatSize(bitmapdata.PixelFormat) / 8; // bytes per pixel
    // NOTE(review): buffer size assumes stride == Width * formatsize (no row padding) — confirm.
    var tempreg = new byte[bitmapdata.Width * bitmapdata.Height * formatsize];
    Marshal.Copy(bitmapdata.Scan0, tempreg, 0, tempreg.Length);
    System.Threading.Tasks.Parallel.Invoke(
        () => { multithread1(tempreg, 0, 0, bitmapdata.Width / 2, bitmapdata.Height / 2, bitmapdata.Width, formatsize); },
        () => { multithread1(tempreg, 0, bitmapdata.Height / 2, bitmapdata.Width / 2, bitmapdata.Height, bitmapdata.Width, formatsize); },
        () => { multithread1(tempreg, bitmapdata.Width / 2, 0, bitmapdata.Width, bitmapdata.Height / 2, bitmapdata.Width, formatsize); },
        () => { multithread1(tempreg, bitmapdata.Width / 2, bitmapdata.Height / 2, bitmapdata.Width, bitmapdata.Height, bitmapdata.Width, formatsize); }
    );
    Marshal.Copy(tempreg, 0, bitmapdata.Scan0, tempreg.Length);
    InputImage.UnlockBits(bitmapdata);
    // Morphological cleanup: grayscale, then dilation followed by erosion.
    Grayscale grayfilter = new Grayscale(0.2125, 0.7154, 0.0721);
    Dilatation dilatefilter = new Dilatation();
    Erosion erodefilter = new Erosion();
    InputImage = grayfilter.Apply((Bitmap)InputImage);
    InputImage = dilatefilter.Apply((Bitmap)InputImage);
    InputImage = erodefilter.Apply((Bitmap)InputImage);
    // Hand region = biggest blob; remember its offset within the frame.
    ExtractBiggestBlob blob = new ExtractBiggestBlob();
    InputImage = blob.Apply(InputImage);
    int cordx = blob.BlobPosition.X;
    int cordy = blob.BlobPosition.Y;
    // Merge: copy the matching region of the original frame into the blob image, quadrant-parallel.
    Bitmap source = new Bitmap(pictureBox1.Image);
    Bitmap destination = new Bitmap(InputImage);
    var sourcerectangle = new Rectangle(0, 0, source.Width, source.Height);
    var destinationrectangle = new Rectangle(0, 0, destination.Width, destination.Height);
    var sourcedata = source.LockBits(sourcerectangle, ImageLockMode.ReadWrite, source.PixelFormat);
    var destinationdata = destination.LockBits(destinationrectangle, ImageLockMode.ReadWrite, destination.PixelFormat);
    var sourcedepth = Bitmap.GetPixelFormatSize(sourcedata.PixelFormat) / 8;
    var destinationdepth = Bitmap.GetPixelFormatSize(destinationdata.PixelFormat) / 8;
    var source1 = new byte[sourcedata.Width * sourcedata.Height * sourcedepth];
    var destination1 = new byte[destinationdata.Width * destinationdata.Height * destinationdepth];
    Marshal.Copy(sourcedata.Scan0, source1, 0, source1.Length);
    Marshal.Copy(destinationdata.Scan0, destination1, 0, destination1.Length);
    System.Threading.Tasks.Parallel.Invoke(
        () => { multithread2(source1, destination1, cordx, 0, cordy, 0, cordx + (destinationdata.Width / 2), destinationdata.Width / 2, cordy + (destinationdata.Height / 2), destinationdata.Height / 2, sourcedata.Width, destinationdata.Width, sourcedepth, destinationdepth); },
        () => { multithread2(source1, destination1, cordx + (destinationdata.Width / 2), destinationdata.Width / 2, cordy, 0, cordx + (destinationdata.Width), destinationdata.Width, cordy + (destinationdata.Height / 2), destinationdata.Height / 2, sourcedata.Width, destinationdata.Width, sourcedepth, destinationdepth); },
        () => { multithread2(source1, destination1, cordx, 0, cordy + (destinationdata.Height / 2), destinationdata.Height / 2, cordx + (destinationdata.Width / 2), destinationdata.Width / 2, cordy + (destinationdata.Height), destinationdata.Height, sourcedata.Width, destinationdata.Width, sourcedepth, destinationdepth); },
        () => { multithread2(source1, destination1, cordx + (destinationdata.Width / 2), destinationdata.Width / 2, cordy + (destinationdata.Height / 2), destinationdata.Height / 2, cordx + (destinationdata.Width), destinationdata.Width, cordy + (destinationdata.Height), destinationdata.Height, sourcedata.Width, destinationdata.Width, sourcedepth, destinationdepth); }
    );
    Marshal.Copy(source1, 0, sourcedata.Scan0, source1.Length);
    Marshal.Copy(destination1, 0, destinationdata.Scan0, destination1.Length);
    source.UnlockBits(sourcedata);
    destination.UnlockBits(destinationdata);
    InputImage = destination;
    // Outline of the hand via Canny edge detection.
    InputImage = grayfilter.Apply((Bitmap)InputImage);
    CannyEdgeDetector edgesoutline = new CannyEdgeDetector();
    InputImage = edgesoutline.Apply(InputImage);
    pictureBox2.Image = InputImage;
    // HOEF: edge-pixel counts over an 8x8 grid, L1-normalized into 64 bins.
    Bitmap blocks = new Bitmap(InputImage);
    int[] numofedges = new int[100];
    double[] normalized = new double[400];
    String alphabet = null;
    int total = 0;
    int sq = 1;
    for (int p = 1; p <= 8; p++)
    {
        for (int q = 1; q <= 8; q++)
        {
            for (int x = (p - 1) * blocks.Width / 8; x < (p * blocks.Width / 8); x++)
            {
                for (int y = (q - 1) * blocks.Height / 8; y < (q * blocks.Height / 8); y++)
                {
                    Color colorPixel = blocks.GetPixel(x, y);
                    int r = colorPixel.R;
                    int g = colorPixel.G;
                    int b = colorPixel.B;
                    if (r != 0 & g != 0 & b != 0)
                    {
                        numofedges[sq]++;
                    }
                }
            }
            sq++;
        }
    }
    for (sq = 1; sq <= 64; sq++)
    {
        total = total + numofedges[sq];
    }
    for (sq = 1; sq <= 64; sq++)
    {
        normalized[sq] = (double)numofedges[sq] / total;
        alphabet = alphabet + " " + sq.ToString() + ":" + normalized[sq].ToString();
    }
    // Classify the feature vector with an SVM trained from the stored set.
    File.WriteAllText(@"datasets\testalpha.txt", label.ToString() + alphabet + Environment.NewLine);
    Problem train = Problem.Read(@"datasets\trainedset.txt");
    Problem test = Problem.Read(@"datasets\testalpha.txt");
    Parameter parameter = new Parameter();
    parameter.C = 32;
    parameter.Gamma = 8;
    Model model = Training.Train(train, parameter);
    Prediction.Predict(test, @"datasets\result.txt", model, false);
    int value = Convert.ToInt32(File.ReadAllText(@"datasets\result.txt"));
    // Map the predicted class index to its letter (0 -> 'A').
    String res = null;
    res = res + (char)(value + 65);
    label1.Text = res;
}
/// <summary>
/// Kinect AllFramesReady handler. First translates the last recognized voice command into an
/// immediate motion primitive, then (on every fifth frame only, to keep the handler cheap)
/// re-detects either the target object or the bot itself via HSL color filtering +
/// biggest-blob extraction, and steers the bot toward the human (phase 0) or the object (phase 1).
/// </summary>
/// <param name="sender">Event source (the Kinect sensor).</param>
/// <param name="e">Frame bundle giving access to the color, depth and skeleton frames.</param>
void sensor_AllFramesReady(object sender, Microsoft.Kinect.AllFramesReadyEventArgs e)
{
    if (this.command == "Stop") { Bot.stop(); }
    if (this.command == "Forward") { Bot.traverse(); }
    if (this.command == "Right") { Bot.turnRight(); }
    // FIX: the original called Bot.turnRight() for "Left" as well (copy-paste bug).
    // NOTE(review): assumes Bot exposes turnLeft() symmetric to turnRight() — confirm.
    if (this.command == "Left") { Bot.turnLeft(); }

    xf++;
    if (xf % 5 == 0)
    {
        xf = 0;
        if (this.command != null)
        {
            using (ColorImageFrame colorFrame = e.OpenColorImageFrame())
            using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
            using (SkeletonFrame skeletonFrame = e.OpenSkeletonFrame())
            {
                humanPosition = frameToHuman(skeletonFrame);
                if (colorFrame != null)
                {
                    // Copy the pixel data from the image to a temporary array
                    colorFrame.CopyPixelDataTo(this.colorPixels);

                    // Write the pixel data into our bitmap
                    this.colorBitmap.WritePixels(
                        new Int32Rect(0, 0, this.colorBitmap.PixelWidth, this.colorBitmap.PixelHeight),
                        this.colorPixels,
                        this.colorBitmap.PixelWidth * sizeof(int),
                        0);

                    System.Drawing.Bitmap bmp = ImageProc.colorFrameToAforge(colorFrame);
                    HSLFiltering filter = new HSLFiltering();
                    // set color ranges to keep
                    if (objec[0] == -1)
                    {
                        // Object not located yet: keep only the color of the requested item.
                        if (command == "Fetching Bottle")
                        {
                            filter.Hue = bottleH;
                            filter.Saturation = bottleS;
                            filter.Luminance = bottleL;
                        }
                        else if (command == "Fetching Box")
                        {
                            filter.Hue = boxH;
                            filter.Saturation = boxS;
                            filter.Luminance = boxL;
                        }
                        // apply the filter
                        filter.ApplyInPlace(bmp);
                        BlobCounter blobCounter = new BlobCounter(bmp);
                        int i = blobCounter.ObjectsCount;
                        ExtractBiggestBlob fil = new ExtractBiggestBlob();
                        int[] pp = new int[2];
                        pp[0] = 0;
                        pp[1] = 0;
                        if (i > 0)
                        {
                            // FIX: extract once (the original ran fil.Apply(bmp) a second time
                            // just to read .Height into an unused local).
                            fil.Apply(bmp);
                            pp[0] = fil.BlobPosition.X;
                            pp[1] = fil.BlobPosition.Y;
                        }
                        short blobDepth = getDepthAtPoint(pp, depthFrame);
                        this.lblObject.Content = pp[0] + ", " + pp[1] + ", " + blobDepth;
                        this.objec[0] = pp[0];
                        this.objec[1] = pp[1];
                        this.objec[2] = blobDepth;
                    }
                    else
                    {
                        // Object already located: track the bot's own color marker instead.
                        filter.Hue = botH;
                        filter.Saturation = botS;
                        filter.Luminance = botL;
                        filter.ApplyInPlace(bmp);
                        BlobCounter blobCounter = new BlobCounter(bmp);
                        int i = blobCounter.ObjectsCount;
                        ExtractBiggestBlob fil = new ExtractBiggestBlob();
                        int[] pp = new int[2];
                        pp[0] = 0;
                        pp[1] = 0;
                        if (i > 0)
                        {
                            // FIX: single Apply, as above.
                            fil.Apply(bmp);
                            pp[0] = fil.BlobPosition.X;
                            pp[1] = fil.BlobPosition.Y;
                        }
                        short blobDepth = getDepthAtPoint(pp, depthFrame);
                        this.lblBot.Content = pp[0] + ", " + pp[1] + ", " + blobDepth;
                        this.bot[0] = pp[0];
                        this.bot[1] = pp[1];
                        this.bot[2] = blobDepth;
                    }
                    //Assign Manual Position to bot and object
                }

                if (humanPosition != null)
                {
                    this.lblHuman.Content = humanPosition[0] + ", " + humanPosition[1] + ", " + humanPosition[2];
                }
                else
                {
                    this.lblHuman.Content = "No Human detected";
                }

                // Phase 0: drive toward the human; once reached, switch to phase 1 and
                // drive toward the object, stopping when it is reached.
                if (this.path == 0)
                {
                    if (humanPosition != null)
                    {
                        if (Bot.moveDoraemon(this.bot[0], this.humanPosition[0], this.bot[2], this.humanPosition[2]) == 0)
                        {
                            this.path = 1;
                        }
                    }
                }
                else
                {
                    if (Bot.moveDoraemon(this.bot[0], this.objec[0], this.bot[2], this.objec[2]) == 0)
                    {
                        Bot.stop();
                    }
                }
            }
            this.lbl.Content = command;
        }
    }
}
/// <summary>
/// Full gesture-recognition pipeline for the image in pictureBox1:
/// skin-color binarisation (parallel over four quadrants), grayscale + dilatation,
/// biggest-blob hand extraction, recoloring with the original skin pixels,
/// Canny-edge HOEF features over a 6x6 grid of 40x40 tiles, then SVM classification
/// of the gesture into a letter (classes 1..11 map to 'A'..'K').
/// </summary>
private void button2_Click(object sender, EventArgs e)
{
    pictureBox2.Image = (Bitmap)pictureBox1.Image.Clone();
    Bitmap src = new Bitmap(pictureBox2.Image);
    Bitmap res = new Bitmap(pictureBox2.Image);
    // FIX: removed the unused SaveFileDialog local.
    src = resize(src, new Size(200, 200));
    res = resize(res, new Size(200, 200));
    pictureBox2.Image = src;
    srcImg = src;
    pictureBox2.Image = res;

    // --- Skin-color binarisation, parallel over the four image quadrants ---
    Bitmap sampleImage = new Bitmap(pictureBox2.Image);
    var rect = new Rectangle(0, 0, sampleImage.Width, sampleImage.Height);
    var data = sampleImage.LockBits(rect, ImageLockMode.ReadWrite, sampleImage.PixelFormat);
    var depth = Bitmap.GetPixelFormatSize(data.PixelFormat) / 8; //bytes per pixel
    var buffer = new byte[data.Width * data.Height * depth];
    //copy pixels to buffer
    Marshal.Copy(data.Scan0, buffer, 0, buffer.Length);
    System.Threading.Tasks.Parallel.Invoke(
        () => Process(buffer, 0, 0, data.Width / 2, data.Height / 2, data.Width, depth),                      //upper-left
        () => Process(buffer, data.Width / 2, 0, data.Width, data.Height / 2, data.Width, depth),             //upper-right
        () => Process(buffer, 0, data.Height / 2, data.Width / 2, data.Height, data.Width, depth),            //lower-left
        () => Process(buffer, data.Width / 2, data.Height / 2, data.Width, data.Height, data.Width, depth));  //lower-right
    //Copy the buffer back to image
    Marshal.Copy(buffer, 0, data.Scan0, buffer.Length);
    sampleImage.UnlockBits(data);
    pictureBox2.Image = sampleImage;
    dstImg = sampleImage;

    // Marks a pixel white when it satisfies the RGB skin heuristic, black otherwise.
    // FIX: now uses its own buffer1/depth1 parameters; the original ignored them and
    // silently captured the outer buffer/depth, which only worked by coincidence.
    void Process(byte[] buffer1, int x, int y, int endx, int endy, int width, int depth1)
    {
        for (int i = x; i < endx; i++)
        {
            for (int j = y; j < endy; j++)
            {
                var offset = ((j * width) + i) * depth1;
                var B = buffer1[offset + 0];
                var G = buffer1[offset + 1];
                var R = buffer1[offset + 2];
                var a = Math.Max(R, Math.Max(B, G));
                var b = Math.Min(R, Math.Min(B, G));
                bool skin =
                    ((R > 95) && (G > 40) && (B > 20) && ((a - b) > 15) && (Math.Abs(R - G) > 15) && (R > G) && (R > B)) ||
                    ((R > 220) && (G > 210) && (B > 170) && ((a - b) > 15) && (Math.Abs(R - G) > 15) && (R > G) && (G > B));
                buffer1[offset + 0] = buffer1[offset + 1] = buffer1[offset + 2] = skin ? (byte)255 : (byte)0;
            }
        }
    }

    //Graysacle
    GrayscaleBT709 filter = new GrayscaleBT709();
    pictureBox2.Image = filter.Apply((Bitmap)pictureBox2.Image);
    dstImg = filter.Apply(dstImg);

    //Dilatation
    try
    {
        Dilatation filter1 = new Dilatation();
        pictureBox2.Image = filter1.Apply((Bitmap)pictureBox2.Image);
        dstImg = filter1.Apply(dstImg);
    }
    catch (Exception)
    {
        System.Windows.Forms.MessageBox.Show("Apply Grayscale");
    }

    //Biggest Blob Extraction
    ExtractBiggestBlob filter2 = new ExtractBiggestBlob();
    pictureBox2.Image = filter2.Apply((Bitmap)pictureBox2.Image);
    dstImg = filter2.Apply(dstImg);
    blob = filter2.BlobPosition;

    // Recolor the extracted blob with the original skin pixels from srcImg.
    Bitmap newBmp = new Bitmap(dstImg.Width, dstImg.Height, System.Drawing.Imaging.PixelFormat.Format32bppArgb);
    using (Graphics gfx = Graphics.FromImage(newBmp))
    {
        gfx.DrawImage(dstImg, 0, 0);
    }
    for (int i = 0; i < dstImg.Width; i++)
    {
        for (int j = 0; j < dstImg.Height; j++)
        {
            // NOTE(review): assumes i + blob.X / j + blob.Y stay inside srcImg — confirm for edge blobs.
            System.Drawing.Color srcColor = srcImg.GetPixel(i + blob.X, j + blob.Y);
            System.Drawing.Color dstColor = dstImg.GetPixel(i, j);
            // Near-black pixels (R,G,B all <= 10) are background; everything else gets the skin color.
            if (!(dstColor.R >= 0 && dstColor.R <= 10 && dstColor.G >= 0 && dstColor.G <= 10 && dstColor.B >= 0 && dstColor.B <= 10))
            {
                newBmp.SetPixel(i, j, srcColor);
            }
        }
    }
    dstImg = newBmp;
    pictureBox2.Image = newBmp;

    // --- HOEF features: non-black (edge) pixel counts over a 6x6 grid of 40x40 tiles ---
    List<double> edgeCount = new List<double>();
    List<double> ratio = new List<double>();
    GrayscaleBT709 grayFilter = new GrayscaleBT709();
    // FIX: dropped the pointless intermediate `new Bitmap(pictureBox2.Image)` that was
    // immediately overwritten, and the unused pixelCount counter.
    Bitmap hoefImage = grayFilter.Apply((Bitmap)pictureBox2.Image);
    CannyEdgeDetector cannyFilter = new CannyEdgeDetector(0, 0, 1.4);
    hoefImage = cannyFilter.Apply(hoefImage);
    pictureBox2.Image = hoefImage;

    var imgarray = new System.Drawing.Image[36];
    for (int i = 0; i < 6; i++)
    {
        for (int j = 0; j < 6; j++)
        {
            var index = i * 6 + j;
            imgarray[index] = new Bitmap(40, 40);
            // FIX: Graphics disposed via using instead of manual Dispose.
            using (var graphics = Graphics.FromImage(imgarray[index]))
            {
                graphics.DrawImage(hoefImage, new Rectangle(0, 0, 40, 40), new Rectangle(i * 40, j * 40, 40, 40), GraphicsUnit.Pixel);
            }
        }
    }
    for (int n = 0; n < 36; n++)
    {
        int counter = 0;
        Bitmap bufferImage = new Bitmap(imgarray[n]);
        for (int i = 0; i < 40; i++)
        {
            for (int j = 0; j < 40; j++)
            {
                System.Drawing.Color hoefColor = bufferImage.GetPixel(i, j);
                if (!(hoefColor.R == 0 && hoefColor.G == 0 && hoefColor.B == 0))
                {
                    counter++;
                }
            }
        }
        edgeCount.Add(counter);
    }
    double Total = edgeCount.Sum();
    foreach (double x in edgeCount)
    {
        ratio.Add(x / Total);
    }

    // Serialize the feature vector in libsvm format.
    // FIX: all streams wrapped in using blocks (the original never closed fs1/sw1,
    // leaking the handle on the result file).
    using (var sw = new StreamWriter(new FileStream(@"E:\test.txt", FileMode.Create, FileAccess.Write)))
    {
        int no = 0;
        sw.Write((++no) + " ");
        for (int i = 0; i < ratio.Count; ++i)
        {
            sw.Write(i + ":" + ratio[i].ToString() + " ");
        }
        sw.WriteLine();
    }

    //Support Vector Machine
    Problem train = Problem.Read(@"E:\AI.txt");
    Problem test = Problem.Read(@"E:\test.txt");
    Parameter parameters = new Parameter();
    // FIX: removed the unused locals `double C; double Gamma;`.
    parameters.C = 32;
    parameters.Gamma = 8;
    Model model = Training.Train(train, parameters);
    Prediction.Predict(test, @"E:\result.txt", model, false);

    string w;
    using (var sw1 = new StreamReader(new FileStream(@"E:\result.txt", FileMode.Open, FileAccess.Read)))
    {
        w = sw1.ReadLine();
    }
    // FIX: classes "1".."11" map to 'A'..'K'; replaces the 11-branch if/else chain.
    // As in the original, any other value (including null) shows nothing.
    if (int.TryParse(w, out int cls) && cls >= 1 && cls <= 11)
    {
        MessageBox.Show(((char)('A' + cls - 1)).ToString());
    }
}
/// <summary>
/// Menu handler: segments skin-colored pixels from pictureBox1's image, isolates the
/// hand via thresholding + morphology + biggest-blob extraction, masks the blob back
/// onto the skin image, runs Canny edge detection, and computes a block-wise
/// normalized edge-count feature vector (printed to the console; not returned).
/// Results are previewed in pictureBox2 (skin mask) and pictureBox3 (edge image).
/// </summary>
private void skinColorToolStripMenuItem_Click(object sender, EventArgs e)
{
    //Extracting RGBs
    Bitmap hand = new Bitmap(pictureBox1.Image, newSize);
    Bitmap skinDetect = new Bitmap(hand.Width, hand.Height);
    //Bitmap blackWhite = new Bitmap(hand.Width, hand.Height);
    Color black = Color.Black;  // NOTE(review): unused local, kept as-is.
    //Color white = Color.White;
    int i, j;
    for (i = 0; i < hand.Width; i++)
    {
        for (j = 0; j < hand.Height; j++)
        {
            Color pixel = hand.GetPixel(i, j);
            int red = pixel.R;
            int green = pixel.G;
            int blue = pixel.B;
            /* (R, G, B) is classified as skin if:
             * R > 95 and G > 40 and B > 20 and
             * max {R, G, B} – min{R, G, B} > 15 and |R – G| > 15 and R > G and R > B */
            if ((red > 95 && green > 40 && blue > 20) && (max(red, green, blue) - min(red, green, blue) > 15) && Math.Abs(red - green) > 15 && red > green && red > blue)
            {
                //Console.WriteLine("Success");
                // Non-skin pixels are left at the Bitmap default (transparent/black).
                skinDetect.SetPixel(i, j, pixel);
            }
        }
    }
    pictureBox2.Image = new Bitmap(skinDetect);
    pictureBox2.SizeMode = PictureBoxSizeMode.StretchImage;
    // NOTE(review): BT.709 green weight is usually 0.7154; 0.71254 looks like a typo — confirm intent.
    Grayscale filter = new Grayscale(0.2125, 0.71254, 0.0721);
    Bitmap grayImage = filter.Apply(skinDetect);
    Threshold filter2 = new Threshold(100);
    Bitmap filteredImage = filter2.Apply(grayImage);
    // Morphological cleanup: closing fills small holes, opening removes small specks.
    Closing close = new Closing();
    close.ApplyInPlace(filteredImage);
    Opening open = new Opening();
    open.ApplyInPlace(filteredImage);
    // create filter for the filtered image
    ExtractBiggestBlob filter3 = new ExtractBiggestBlob();
    // apply the filter
    Bitmap biggestBlobsImage = filter3.Apply(filteredImage);
    AForge.IntPoint a = filter3.BlobPosition;
    Console.WriteLine(a);
    //Biggest blob for old extracted skin image
    ExtractBiggestBlob filter4 = new ExtractBiggestBlob();
    Bitmap skinBlob = new Bitmap(skinDetect);
    //apply filter
    Bitmap biggestSkinBlob = filter4.Apply(skinBlob);
    //Skin color for largest blob
    Bitmap one = new Bitmap(biggestSkinBlob);
    Bitmap two = new Bitmap(biggestBlobsImage);
    // NOTE(review): assumes `one` and `two` have the same dimensions — the two
    // biggest-blob extractions ran on different inputs; confirm they always agree.
    for (i = 0; i < two.Width; i++)
    {
        for (j = 0; j < two.Height; j++)
        {
            Color pixelOne = one.GetPixel(i, j);
            Color pixelTwo = two.GetPixel(i, j);
            int redOne = pixelOne.R;
            int greenOne = pixelOne.G;
            int blueOne = pixelOne.B;
            int redTwo = pixelTwo.R;
            int greenTwo = pixelTwo.G;
            int blueTwo = pixelTwo.B;
            // This mask is logically AND with original image to extract only the palm which is required for feature extraction.
            two.SetPixel(i, j, Color.FromArgb(redOne & redTwo, greenOne & greenTwo, blueOne & blueTwo));
        }
    }
    //Getting a grayscale image from the recolored image
    Bitmap getGrayImage = filter.Apply(two);
    // create filter
    CannyEdgeDetector filter1 = new CannyEdgeDetector();
    filter1.LowThreshold = 0;
    filter1.HighThreshold = 0;
    filter1.GaussianSigma = 1.4;
    // apply the filter
    Bitmap cannyEdgeImage = filter1.Apply(getGrayImage);
    // Scale the edge image to a fixed 360x360 canvas so the block grid below is uniform.
    Bitmap resizeImage = new Bitmap(360, 360);
    using (var graphics = Graphics.FromImage(resizeImage))
        graphics.DrawImage(cannyEdgeImage, 0, 0, 360, 360);
    pictureBox3.Image = new Bitmap(resizeImage);
    pictureBox3.SizeMode = PictureBoxSizeMode.StretchImage;
    int x, y;
    //Image to obtain blocks for
    Bitmap imageWithBlock = new Bitmap(resizeImage);
    Console.WriteLine("Width = " + resizeImage.Width + " Height = " + resizeImage.Height);
    int imageHeightSize = resizeImage.Height / blockSize;
    int imageWidthSize = resizeImage.Width / blockSize;
    Console.WriteLine("Width = " + imageWidthSize + " Height = " + imageHeightSize);
    List <int> featureVector = new List <int>();
    double totalPixelCount = 0;
    // Count edge (non-black) pixels per blockSize x blockSize cell.
    for (i = 0; i < blockSize; i++)
    {
        for (j = 0; j < blockSize; j++)
        {
            int whiteEdgeCount = 0, blackEdgeCount = 0;
            for (x = i * imageWidthSize; x < (i * imageWidthSize) + imageWidthSize; x++)
            {
                for (y = j * imageHeightSize; y < (j * imageHeightSize) + imageHeightSize; y++)
                {
                    // To count the edges in the range
                    Color singlePixel = imageWithBlock.GetPixel(x, y);
                    int red = singlePixel.R;
                    int green = singlePixel.G;
                    int blue = singlePixel.B;
                    // FromArgb(Black.ToArgb()) is compared instead of Color.Black because
                    // GDI+ Color equality also compares the named-color state, not just ARGB.
                    if (singlePixel != Color.FromArgb(Color.Black.ToArgb()))
                    {
                        whiteEdgeCount++;
                    }
                    else
                    {
                        blackEdgeCount++;  // NOTE(review): only ever logged (commented out below).
                    }
                }
            }
            //Console.WriteLine("White = " + whiteEdgeCount + " Black = " + blackEdgeCount);
            //Add value to total count
            totalPixelCount += whiteEdgeCount;
            // whiteCount = edges in range
            featureVector.Add(whiteEdgeCount);
        }
    }
    //Calculate Normalization and add the value to the featureNormVector
    List <double> featureNormVector = new List <double>();
    //Total Pixel Count
    //Console.WriteLine(totalPixelCount);
    //Normalization
    // NOTE(review): if no edge pixels exist, totalPixelCount is 0 and the division
    // yields NaN (double division does not throw) — confirm callers tolerate this.
    for (i = 0; i < featureVector.Count; i++)
    {
        double normalizedValue = featureVector[i] / totalPixelCount;
        Console.WriteLine(normalizedValue);
        featureNormVector.Add(normalizedValue);
    }
}
/// <summary>
/// Automated version of the skin-extraction pipeline: takes a raw bitmap, isolates the
/// biggest skin blob (threshold + morphology + biggest-blob), masks it with the skin
/// colors, runs Canny edge detection on a 360x360 canvas, and returns the normalized
/// per-block edge-count feature vector. Also previews the edge image in pictureBox3.
/// </summary>
/// <param name="rawBitmapData">Source image to extract features from.</param>
/// <returns>blockSize*blockSize normalized edge counts (each count / total count).</returns>
private List <double> automateFeatureNormalizationExtraction(Bitmap rawBitmapData)
{
    Bitmap afterSkinOnly = performSkinExtract(rawBitmapData);
    // NOTE(review): BT.709 green weight is usually 0.7154; 0.71254 looks like a typo — confirm intent.
    Grayscale filter = new Grayscale(0.2125, 0.71254, 0.0721);
    Bitmap grayImage = filter.Apply(afterSkinOnly);
    Threshold filter2 = new Threshold(100);
    Bitmap filteredImage = filter2.Apply(grayImage);
    // Morphological cleanup: closing fills small holes, opening removes small specks.
    Closing close = new Closing();
    close.ApplyInPlace(filteredImage);
    Opening open = new Opening();
    open.ApplyInPlace(filteredImage);
    // create filter for the filtered image
    ExtractBiggestBlob filter3 = new ExtractBiggestBlob();
    // apply the filter
    Bitmap biggestBlobsImage = filter3.Apply(filteredImage);
    AForge.IntPoint a = filter3.BlobPosition;  // NOTE(review): assigned but unused here.
    //Console.WriteLine(a);
    //Biggest blob for old extracted skin image
    ExtractBiggestBlob filter4 = new ExtractBiggestBlob();
    Bitmap skinBlob = new Bitmap(afterSkinOnly);
    //apply filter
    Bitmap biggestSkinBlob = filter4.Apply(skinBlob);
    //Skin color for largest blob
    Bitmap one = new Bitmap(biggestSkinBlob);
    Bitmap two = new Bitmap(biggestBlobsImage);
    int i, j;
    // NOTE(review): assumes `one` and `two` have the same dimensions — the two
    // biggest-blob extractions ran on different inputs; confirm they always agree.
    for (i = 0; i < two.Width; i++)
    {
        for (j = 0; j < two.Height; j++)
        {
            Color pixelOne = one.GetPixel(i, j);
            Color pixelTwo = two.GetPixel(i, j);
            int redOne = pixelOne.R;
            int greenOne = pixelOne.G;
            int blueOne = pixelOne.B;
            int redTwo = pixelTwo.R;
            int greenTwo = pixelTwo.G;
            int blueTwo = pixelTwo.B;
            // This mask is logically AND with original image to extract only the palm which is required for feature extraction.
            two.SetPixel(i, j, Color.FromArgb(redOne & redTwo, greenOne & greenTwo, blueOne & blueTwo));
        }
    }
    //Getting a grayscale image from the recolored image
    Bitmap getGrayImage = filter.Apply(two);
    // create filter
    CannyEdgeDetector filter1 = new CannyEdgeDetector();
    filter1.LowThreshold = 0;
    filter1.HighThreshold = 0;
    filter1.GaussianSigma = 1.4;
    // apply the filter
    Bitmap cannyEdgeImage = filter1.Apply(getGrayImage);
    // Scale the edge image to a fixed 360x360 canvas so the block grid below is uniform.
    Bitmap resizeImage = new Bitmap(360, 360);
    using (var graphics = Graphics.FromImage(resizeImage))
        graphics.DrawImage(cannyEdgeImage, 0, 0, 360, 360);
    pictureBox3.Image = new Bitmap(resizeImage);
    pictureBox3.SizeMode = PictureBoxSizeMode.StretchImage;
    int x, y;
    //Image to obtain blocks for
    Bitmap imageWithBlock = new Bitmap(resizeImage);
    //Console.WriteLine("Width = " + resizeImage.Width + " Height = " + resizeImage.Height);
    int imageHeightSize = resizeImage.Height / blockSize;
    int imageWidthSize = resizeImage.Width / blockSize;
    //Console.WriteLine("Width = " + imageWidthSize + " Height = " + imageHeightSize);
    List <int> featureVector = new List <int>();
    double totalPixelCount = 0;
    // Count edge (non-black) pixels per blockSize x blockSize cell.
    for (i = 0; i < blockSize; i++)
    {
        for (j = 0; j < blockSize; j++)
        {
            int whiteEdgeCount = 0, blackEdgeCount = 0;
            for (x = i * imageWidthSize; x < (i * imageWidthSize) + imageWidthSize; x++)
            {
                for (y = j * imageHeightSize; y < (j * imageHeightSize) + imageHeightSize; y++)
                {
                    // To count the edges in the range
                    Color singlePixel = imageWithBlock.GetPixel(x, y);
                    int red = singlePixel.R;
                    int green = singlePixel.G;
                    int blue = singlePixel.B;
                    // FromArgb(Black.ToArgb()) is compared instead of Color.Black because
                    // GDI+ Color equality also compares the named-color state, not just ARGB.
                    if (singlePixel != Color.FromArgb(Color.Black.ToArgb()))
                    {
                        whiteEdgeCount++;
                    }
                    else
                    {
                        blackEdgeCount++;  // NOTE(review): only ever logged (commented out below).
                    }
                }
            }
            //Console.WriteLine("White = " + whiteEdgeCount + " Black = " + blackEdgeCount);
            //Add value to total count
            totalPixelCount += whiteEdgeCount;
            // whiteCount = edges in range
            featureVector.Add(whiteEdgeCount);
        }
    }
    //Calculate Normalization and add the value to the featureNormVector
    List <double> featureNormVector = new List <double>();
    //Total Pixel Count
    //Console.WriteLine(totalPixelCount);
    //Normalization
    // NOTE(review): if no edge pixels exist, totalPixelCount is 0 and the division
    // yields NaN (double division does not throw) — confirm callers tolerate this.
    for (i = 0; i < featureVector.Count; i++)
    {
        double normalizedValue = featureVector[i] / totalPixelCount;
        Console.WriteLine(normalizedValue);
        featureNormVector.Add(normalizedValue);
    }
    Console.WriteLine("Total count of norm(individual)=" + i);
    return(featureNormVector);
}