/// <summary>
/// Converts the image in pictureBox1 to grayscale (BT.709) and shows the result in pictureBox4.
/// </summary>
private void btnGriYap_Click(object sender, EventArgs e)
{
    // The original handler also allocated an unused Bitmap copy of
    // pictureBox1.Image, leaking a GDI bitmap on every click; removed.
    GrayscaleBT709 griResim = new GrayscaleBT709();
    pictureBox4.Image = griResim.Apply((Bitmap)pictureBox1.Image);
}
/// <summary>
/// Runs Sobel edge detection on the source image: BT.709 grayscale first,
/// then the Sobel operator, and shows the result in islemBox.
/// </summary>
private void sobelToolStripMenuItem_Click(object sender, EventArgs e)
{
    var gri = new GrayscaleBT709();
    var sobel = new SobelEdgeDetector();
    islem = gri.Apply(kaynak);
    islem = sobel.Apply(islem);
    islemBox.Image = islem;
}
/// <summary>
/// Template matching: loads the template (path in textBox2) and source image
/// (path in textBox1), converts both to grayscale, runs exhaustive template
/// matching and highlights the best match when its similarity exceeds 0.8.
/// </summary>
private void btnMatch_Click(object sender, EventArgs e)
{
    Grayscale gg = new GrayscaleBT709();

    // Converting Template into GrayScale Image
    using (Bitmap templateImage = new Bitmap(textBox2.Text))
    using (Bitmap grayTemplate = gg.Apply(templateImage))
    {
        // Similarity threshold 0 => exhaustive search returns every candidate,
        // best first.
        ExhaustiveTemplateMatching tm = new ExhaustiveTemplateMatching(0);

        // sourceImage is intentionally not disposed: it may be handed to
        // PicTemplate below.
        Bitmap sourceImage = new Bitmap(textBox1.Text);
        using (Bitmap graySource = gg.Apply(sourceImage))
        {
            TemplateMatch[] matchings = tm.ProcessImage(graySource, grayTemplate);

            // Guard against an empty result set before indexing — the original
            // code threw IndexOutOfRangeException when nothing was returned.
            if (matchings.Length > 0 && matchings[0].Similarity > 0.8f)
            {
                Rectangle r = matchings[0].Rectangle;
                using (Graphics g = Graphics.FromImage(sourceImage))
                using (Pen pen = new Pen(Color.HotPink, 20))
                {
                    g.DrawRectangle(pen, r.X, r.Y, r.Width, r.Height);
                }
                PicTemplate.Image = sourceImage;
                MessageBox.Show("Match found...");
            }
            else
            {
                MessageBox.Show("Match Not Found...");
            }
        }
    }
}
/// <summary>Applies a BT.709 grayscale filter to the original image and previews it.</summary>
private void greyScaleToolStripMenuItem_Click(object sender, EventArgs e)
{
    var converter = new GrayscaleBT709();
    newImg = converter.Apply(orgImg);
    pictureBox2.Image = newImg;
}
/// <summary>
/// Runs skin detection over imageGot in place (via ProcessDetectSkin on the
/// raw pixel buffer), then binarizes the result: BT.709 grayscale + threshold.
/// </summary>
private void detectSkin()
{
    //skin detection
    Color clr = Color.FromArgb(Color.White.ToArgb());
    Color clr1 = Color.FromArgb(Color.Black.ToArgb());
    int imgHeight = imageGot.Height;
    int imgWidth = imageGot.Width;
    String sc = "";
    var rect = new Rectangle(0, 0, imgWidth, imgHeight);
    var data = imageGot.LockBits(rect, ImageLockMode.ReadWrite, imageGot.PixelFormat);
    var depth = Bitmap.GetPixelFormatSize(data.PixelFormat) / 8; //bytes per pixel

    // NOTE(review): buffer is sized Width*Height*depth and copied straight
    // from Scan0 — this assumes Stride == Width * depth (no row padding).
    // Verify for widths that are not a multiple of 4.
    var buffer = new byte[data.Width * data.Height * depth];

    //copy pixels to buffer
    Marshal.Copy(data.Scan0, buffer, 0, buffer.Length);
    ProcessDetectSkin(buffer, 0, 0, data.Width, data.Height, data.Width, depth);
    //Copy the buffer back to image
    Marshal.Copy(buffer, 0, data.Scan0, buffer.Length);
    imageGot.UnlockBits(data);

    // Binarize the skin-detected image: grayscale, then default threshold.
    GrayscaleBT709 gs = new GrayscaleBT709();
    imageGot = gs.Apply(imageGot);
    Threshold th = new Threshold();
    imageGot = th.Apply(imageGot);
}
/// <summary>
/// Called when videoPlayer receives a new frame. Converts the frame to
/// grayscale, runs the dlib face detector every 4th frame, then detects
/// facial landmarks and the eye state once a face is known.
/// </summary>
/// <param name="sender">The video player raising the event.</param>
/// <param name="image">The captured frame (passed by reference by the player).</param>
private void videoPlayer_NewFrame(object sender, ref System.Drawing.Bitmap image)
{
    // convert frame to grayscale
    var grayscale = new GrayscaleBT709();
    var grayImage = grayscale.Apply(image);

    // convert image to dlib format
    var img = grayImage.ToArray2D <RgbPixel>();

    // detect face every 4 frames (full detection is the expensive step)
    if (frameIndex % 4 == 0)
    {
        var faces = faceDetector.Detect(img);
        if (faces.Length > 0)
        {
            currentFace = faces.First();
        }
    }

    // abort if we don't have a face at this point.
    // NOTE(review): frameIndex is only incremented below this early return,
    // so detection re-runs on every frame until the first face is found.
    if (currentFace == default(DlibDotNet.Rectangle))
    {
        return;
    }

    // detect facial landmarks on the current face
    var shape = shapePredictor.Detect(img, currentFace);

    // detect eye state from the landmarks, using the original color frame
    DetectEyeState(image, shape);

    // update frame counter
    frameIndex++;
}
/// <summary>Grayscales the images in pictureBox1 and pictureBox4 into the preview boxes.</summary>
private void grayScaleToolStripMenuItem_Click(object sender, EventArgs e)
{
    var toGray = new GrayscaleBT709();
    pictureBox2.Image = toGray.Apply((Bitmap)pictureBox1.Image);
    pictureBox5.Image = toGray.Apply((Bitmap)pictureBox4.Image);
}
/// <summary>Returns a BT.709 grayscale copy of the given bitmap.</summary>
/// <param name="image">The source bitmap; not modified.</param>
/// <returns>A new 8bpp grayscale bitmap.</returns>
public static Bitmap ConvertToFormat(Bitmap image)
{
    var toGray = new GrayscaleBT709();
    return toGray.Apply(image);
}
/// <summary>
/// New frame received by the player. When "fixed picture number" mode is
/// selected, crops a fixed region of interest from the frame, normalizes it
/// to 200x200 grayscale, and produces a 28x28 thumbnail for the preview boxes.
/// </summary>
private void videoSourcePlayer_NewFrame(object sender, ref Bitmap image)
{
    // The original handler created and immediately disposed an unused
    // Graphics object left over from commented-out timestamp painting;
    // that dead code has been removed.
    if (radioButtonFixPicNo.Checked)
    {
        // Crop the fixed region of interest from the camera frame.
        Crop filter = new Crop(new Rectangle(50, 300, 400, 400));
        Bitmap newImage = filter.Apply(image);

        // Normalize to 200x200 before grayscaling.
        ResizeBilinear filter1 = new ResizeBilinear(200, 200);
        newImage = filter1.Apply(newImage);

        GrayscaleBT709 filter2 = new GrayscaleBT709();
        Bitmap grayImage = filter2.Apply(newImage);

        // 28x28 thumbnail, presumably sized for an MNIST-style classifier input.
        ResizeBilinear filter3 = new ResizeBilinear(28, 28);
        Bitmap smallPic = filter3.Apply(grayImage);

        pictureBox1.Image = grayImage;
        pictureBox2.Image = smallPic;
    }
}
/// <summary>Grayscales the source image, then runs homogeneity edge detection on it.</summary>
private void homogenityToolStripMenuItem_Click(object sender, EventArgs e)
{
    Bitmap grayed = new GrayscaleBT709().Apply((Bitmap)pictureBox1.Image);
    pictureBox2.Image = grayed;
    pictureBox2.Image = new HomogenityEdgeDetector().Apply(grayed);
}
/// <summary>Binarizes the source image: BT.709 grayscale followed by a default threshold.</summary>
private void binarToolStripMenuItem_Click(object sender, EventArgs e)
{
    Bitmap grayed = new GrayscaleBT709().Apply((Bitmap)pictureBox1.Image);
    pictureBox2.Image = grayed;
    pictureBox2.Image = new Threshold().Apply(grayed);
}
/// <summary>
/// Finds circular traffic-sign candidates in a frame: grayscale, Sobel edges,
/// threshold, blob detection, then shape-checks each blob for circularity.
/// Shows the first circle in trafficSignBox, highlights all circles on the
/// frame, and shows the thresholded edge image in thresholdBox.
/// </summary>
/// <param name="frame">The input frame; modified in place with highlights.</param>
/// <returns>The same frame, with detected circles outlined in blue.</returns>
private Bitmap ProcessImage(Bitmap frame)
{
    // convert the image to grayscale
    var grayConverter = new GrayscaleBT709();
    var grayFrame = grayConverter.Apply(frame);

    // use a sobel edge detector to find color edges
    var edgeDetector = new SobelEdgeDetector();
    var edgeFrame = edgeDetector.Apply(grayFrame);

    // threshold the edges
    var thresholdConverter = new Threshold(200);
    thresholdConverter.ApplyInPlace(edgeFrame);

    // use a blobcounter to find interesting shapes
    var detector = new BlobCounter()
    {
        FilterBlobs = true,
        MinWidth = 25,
        MinHeight = 25
    };
    detector.ProcessImage(edgeFrame);

    // find the circular shapes. Materialize the query once — the original
    // enumerated it three times (Count, First, foreach), re-running the
    // blob edge extraction and shape check on every pass.
    var shapeDetector = new SimpleShapeChecker();
    var blobs = detector.GetObjectsInformation();
    var circles = (from blob in blobs
                   let edgePoints = detector.GetBlobsEdgePoints(blob)
                   where shapeDetector.CheckShapeType(edgePoints) == ShapeType.Circle
                   select blob).ToList();

    // show the traffic sign
    if (circles.Count > 0)
    {
        var circleFrame = frame.Clone(circles[0].Rectangle, PixelFormat.DontCare);
        trafficSignBox.Image = circleFrame;
    }

    // highlight every circle in the image; dispose GDI objects deterministically
    using (Graphics g = Graphics.FromImage(frame))
    using (var pen = new Pen(Color.Blue, 4))
    {
        foreach (var circle in circles)
        {
            g.DrawRectangle(pen, circle.Rectangle);
        }
    }

    // update picture boxes
    thresholdBox.Image = edgeFrame;
    return (frame);
}
/// <summary>
/// Opens an image chosen by the user, shows it in pictureBox1, and shows a
/// BT.709 grayscale version in pictureBox2.
/// </summary>
private void abrirToolStripMenuItem_Click(object sender, EventArgs e)
{
    if (openFileDialog1.ShowDialog() != System.Windows.Forms.DialogResult.OK)
    {
        return;
    }

    pictureBox1.Image = (Bitmap)System.Drawing.Image.FromFile(openFileDialog1.FileName);
    var toGray = new GrayscaleBT709();
    pictureBox2.Image = toGray.Apply((Bitmap)pictureBox1.Image);
}
/// <summary>Grayscales the source image, then runs difference edge detection on it.</summary>
private void differenceToolStripMenuItem_Click(object sender, EventArgs e)
{
    var toGray = new GrayscaleBT709();
    var edges = new DifferenceEdgeDetector();
    Bitmap grayed = toGray.Apply((Bitmap)pictureBox1.Image);
    pictureBox2.Image = grayed;
    pictureBox2.Image = edges.Apply(grayed);
}
/// <summary>
/// Camera new-frame handler: keeps a mirrored color copy of the frame and a
/// processed binary mask (grayscale, threshold, invert) shown in pictureBox2.
/// </summary>
private void kullanilacakcihaz_NewFrame(object sender, NewFrameEventArgs eventArgs)
{
    // Take two independent copies of the incoming frame.
    image = (Bitmap)eventArgs.Frame.Clone();
    image1 = (Bitmap)eventArgs.Frame.Clone();
    image = new Mirror(false, true).Apply(image); // Mirrored because the movement appears reversed.
    image1 = new GrayscaleBT709().Apply(image); // Converted to grayscale so a threshold can be applied.
    image1 = new Threshold(120).Apply(image1);
    image1 = new Invert().Apply(image1); // Inverted because I want to work with the white image.
    pictureBox2.Image = image1;
}
/// <summary>
/// Attempts to locate the gap in a sliding-puzzle captcha image and returns
/// its horizontal offset (looks for strong left-to-right brightness drops,
/// then for tall 1px-wide vertical blob streaks).
/// </summary>
/// <param name="puzzle">The captcha bitmap; may be null.</param>
/// <param name="offsetX">The detected X offset minus a 5px margin, or 0.</param>
/// <returns>True when a plausible gap position was found.</returns>
public static bool Solve(Bitmap puzzle, out int offsetX)
{
    if (puzzle == null)
    {
        offsetX = 0;
        return (false);
    }

    using (Bitmap test = new GrayscaleBT709().Apply(puzzle))
    {
        BitmapData data = test.LockBits(new Rectangle(0, 0, test.Width, test.Height), ImageLockMode.ReadWrite, test.PixelFormat);
        byte[] points = new byte[data.Stride * test.Height];
        Marshal.Copy(data.Scan0, points, 0, points.Length);

        // Keep only pixels with a strong brightness drop (> 80) to their right
        // neighbor, away from the left (50px) and right (10px) borders; every
        // other pixel is forced to white (255). The border guards also keep
        // the i + 1 lookahead in bounds.
        for (int y = 0; y < test.Height; y++)
        {
            for (int x = 0; x < data.Stride; x++)
            {
                int i = y * data.Stride + x;
                if (x > 50 && x < data.Stride - 10 && points[i] - points[i + 1] > 80)
                {
                    points[i] = points[i + 1];
                }
                else
                {
                    points[i] = 255;
                }
            }
        }

        Marshal.Copy(points, 0, data.Scan0, points.Length);
        test.UnlockBits(data);

        // Make edges white, then keep only tall (>=5px) 1px-wide streaks.
        new Invert().ApplyInPlace(test);
        new BlobsFiltering {
            MinHeight = 5, MaxWidth = 1
        }.ApplyInPlace(test);

        BlobCounter counter = new BlobCounter();
        counter.ObjectsOrder = ObjectsOrder.Size;
        counter.ProcessImage(test);
        Rectangle[] blobs = counter.GetObjectsRectangles();

        // Accept a single streak, or two streaks at the same X (presumably the
        // top and bottom edges of the gap); back off 5px for margin.
        offsetX = blobs.Length == 1 ? blobs[0].X - 5 : blobs.Length == 2 && blobs[0].X == blobs[1].X ? blobs[0].X - 5 : 0;
        return (offsetX > 0);
    }
}
/// <summary>
/// Fills holes in the detected skin mask: grayscale + threshold(100), then a
/// morphological closing followed by an opening; the result goes to pictureBox3.
/// </summary>
private void fillHoleToolStripMenuItem_Click(object sender, EventArgs e)
{
    Bitmap grey = new GrayscaleBT709().Apply(skin);
    new Threshold(100).ApplyInPlace(grey);
    Bitmap closed = new Closing().Apply(grey);
    k = new Opening().Apply(closed);
    pictureBox3.Image = k;
}
/// <summary>
/// Runs Canny edge detection on imageGot (after BT.709 grayscaling) and shows
/// the result in pictureBox1.
/// </summary>
private void edgeDetect()
{
    // The original method allocated an unused copy of imageGot here,
    // leaking a bitmap on every call; it has been removed.
    GrayscaleBT709 gs = new GrayscaleBT709();
    imageGot = gs.Apply(imageGot);

    // Thresholds of 0 keep every detected edge; sigma 1.4 is the usual
    // Gaussian smoothing width for Canny.
    CannyEdgeDetector cn = new CannyEdgeDetector();
    cn.LowThreshold = 0;
    cn.HighThreshold = 0;
    cn.GaussianSigma = 1.4;
    imageGot = cn.Apply(imageGot);
    pictureBox1.Image = imageGot;
}
/// <summary>
/// Converts the preview image to BT.709 grayscale in place and remembers it
/// in imgFile. Does nothing when no image is loaded.
/// </summary>
private void blackAndWhiteToolStripMenuItem_Click(object sender, EventArgs e)
{
    if (checkifthereisimage() != 1)
    {
        return;
    }

    GrayscaleBT709 gray = new GrayscaleBT709();
    try
    {
        pictureBox1ImagePreview.Image = gray.Apply((Bitmap)pictureBox1ImagePreview.Image);
        imgFile = pictureBox1ImagePreview.Image;
    }
    catch (Exception ex)
    {
        // The original swallowed the exception silently; surface it so
        // unsupported pixel formats don't fail invisibly.
        MessageBox.Show("Could not convert the image to grayscale: " + ex.Message);
    }
}
/// <summary>
/// Called when videoPlayer receives a new frame. Runs the lane-detection
/// pipeline: grayscale, threshold, mask above the horizon, Canny edges,
/// Hough line transform, then filters for candidate lane lines and overlays
/// them on the camera image.
/// </summary>
/// <param name="sender">The video player raising the event.</param>
/// <param name="image">The captured frame (modified in place with the overlay).</param>
private void videoPlayer_NewFrame(object sender, ref Bitmap image)
{
    // convert image to grayscale
    var gray = new GrayscaleBT709();
    var frame = gray.Apply(image);

    // threshold image to only keep light pixels (lane markings are bright)
    var threshold = new Threshold(125);
    threshold.ApplyInPlace(frame);

    // blank out everything but the road — the top 65% of the frame is
    // above the horizon
    var horizonY = (int)(image.Height * 0.65);
    var fill = new CanvasFill(new Rectangle(0, 0, image.Width, horizonY), Color.Red);
    fill.ApplyInPlace(frame);

    // detect edges
    var edgeDetector = new CannyEdgeDetector();
    edgeDetector.ApplyInPlace(frame);

    // do a hough line transformation, which will search for straight lines in the frame
    var transform = new HoughLineTransformation();
    transform.ProcessImage(frame);
    var rawLines = transform.GetMostIntensiveLines(50);

    // only keep non-horizontal lines that cross the horizon at the vanishing
    // point: radius within [-75, -65] and theta outside the near-horizontal
    // 85..95 degree band
    var lines = from l in rawLines
                let range = new Range(-75, -65)
                where range.IsInside(l.Radius) && (l.Theta <= 85 || l.Theta >= 95)
                select l;

    // show the edge detection view in the bottom left box
    edgeBox.Image = frame;

    // show the lane detection view in the bottom right box
    var laneImg = new Bitmap(image.Width, image.Height);
    Utility.DrawHoughLines(lines, laneImg, Color.White, 1);
    laneBox.Image = laneImg;

    // draw the lanes on the main camera image too
    Utility.DrawHoughLines(lines, image, Color.LightGreen, 2);
}
/// <summary>
/// Called when videoPlayer receives a new frame. Detects the face and nose
/// (once every 5 frames) and draws a mustache under the last-known nose
/// position on every frame.
/// </summary>
/// <param name="sender">The video player raising the event.</param>
/// <param name="image">The captured frame (modified in place).</param>
private void videoPlayer_NewFrame(object sender, ref Bitmap image)
{
    // grayscale the image
    var grayscale = new GrayscaleBT709();
    var frame = grayscale.Apply(image);

    // only detect once every 5 frames (detection is the expensive step)
    if (frameCounter++ % 5 == 0)
    {
        // scan the image and grab the first face
        var faces = faceDetector.ProcessFrame(frame);
        if (faces.Count() > 0)
        {
            // copy the face region out of the color frame
            faceRect = faces.First();
            var face = image.Clone(faces.First(), PixelFormat.DontCare);

            // search the face for the nose
            var noses = noseDetector.ProcessFrame(face);
            if (noses.Count() > 0)
            {
                // record new nose location, translated back to frame coordinates
                noseRect = noses.First();
                noseRect.Offset(faceRect.Location);
            }
        }
    }

    // draw the mustache
    using (Graphics g = Graphics.FromImage(image))
    {
        // we want a mustache twice as wide as the nose
        // (the original comment said "three times" — the code computes 2x)
        int mustacheWidth = 2 * noseRect.Width;
        // preserve the mustache image's aspect ratio
        int mustacheHeight = mustacheWidth * mustache.Height / mustache.Width;

        // center the mustache on the bottom of the nose
        int x1 = noseRect.X - (mustacheWidth / 4);
        int x2 = noseRect.X + noseRect.Width + (mustacheWidth / 4);
        int y1 = noseRect.Y + noseRect.Height - (mustacheHeight / 2);
        int y2 = noseRect.Y + noseRect.Height + (mustacheHeight / 2);

        // draw the mustache
        g.DrawImage(mustache, x1, y1, x2 - x1, y2 - y1);
    }
}
/// <summary>
/// Template matching: converts the template (PicTemplate) and source
/// (PictureInput) images to grayscale, runs exhaustive template matching,
/// and outlines the best match when its similarity exceeds 0.8.
/// </summary>
private void btnMatch_Click(object sender, EventArgs e)
{
    Grayscale gg = new GrayscaleBT709();

    // Convert the template into a grayscale image.
    Bitmap templateImage = new Bitmap(PicTemplate.Image);
    Bitmap grayTemplate = gg.Apply(templateImage);

    // Convert the second (source) image.
    Bitmap sourceImage = new Bitmap(PictureInput.Image);
    Bitmap graySource = gg.Apply(sourceImage);

    // Similarity threshold 0 => return every candidate, best first.
    ExhaustiveTemplateMatching tm = new ExhaustiveTemplateMatching(0);
    TemplateMatch[] matchings = tm.ProcessImage(graySource, grayTemplate);

    lbRes.Text = "Result:";

    // Guard against an empty result set before indexing — the original code
    // threw IndexOutOfRangeException when nothing was returned at all.
    if (matchings.Length > 0 && matchings[0].Similarity > 0.8f)
    {
        Rectangle r = matchings[0].Rectangle;
        // Frame the located object; dispose GDI objects deterministically.
        using (Graphics g = Graphics.FromImage(sourceImage))
        using (Pen pen = new Pen(Color.Red, 3))
        {
            g.DrawRectangle(pen, r.X, r.Y, r.Width, r.Height);
        }
        // Show the annotated source image and a modal confirmation.
        PicTemplate.Image = sourceImage;
        MessageBox.Show("Match found...");
    }
    else
    {
        MessageBox.Show("Match Not Found...");
    }
}
/// <summary>Applies Bradley local thresholding (after BT.709 grayscale) and shows the result.</summary>
private void brandlyToolStripMenuItem_Click(object sender, EventArgs e)
{
    var gri = new GrayscaleBT709();
    var bradley = new BradleyLocalThresholding();
    islem = gri.Apply(kaynak);
    islem = bradley.Apply(islem);
    islemBox.Image = islem;
}
/// <summary>
/// Applies the selected predefined filter (or filter chain) to an encoded image.
/// </summary>
/// <param name="imageBytes">Encoded source image bytes.</param>
/// <param name="filter">Which predefined filter/effect to apply.</param>
/// <param name="format">Optional output image format; null keeps the default.</param>
/// <returns>
/// The filtered image bytes; the input unchanged for Default; null when the
/// filter value is unknown.
/// </returns>
public static byte[] ApplyFilter(byte[] imageBytes, ImageProcessingFilters filter, ImageFormat format = null)
{
    // Filter chains use the FiltersSequence(params IFilter[]) constructor
    // instead of the original's repeated ((FiltersSequence)baseFilter).Add(...)
    // casts. Dead commented-out ImageWarp code has been removed.
    IFilter baseFilter = null;
    switch (filter)
    {
        case ImageProcessingFilters.Default:
            return (imageBytes);

        case ImageProcessingFilters.GrayscaleBT709:
            baseFilter = new GrayscaleBT709();
            break;

        case ImageProcessingFilters.GrayscaleRMY:
            baseFilter = new GrayscaleRMY();
            break;

        case ImageProcessingFilters.GrayscaleY:
            baseFilter = new GrayscaleY();
            break;

        case ImageProcessingFilters.BayerFilter:
            baseFilter = new FiltersSequence(
                new ExtractChannel(RGB.B),
                new BayerFilter());
            break;

        case ImageProcessingFilters.Channel_Red:
            baseFilter = new ExtractChannel(RGB.R);
            break;

        case ImageProcessingFilters.Channel_Green:
            baseFilter = new ExtractChannel(RGB.G);
            break;

        case ImageProcessingFilters.Channel_Blue:
            baseFilter = new ExtractChannel(RGB.B);
            break;

        case ImageProcessingFilters.WaterWave:
            baseFilter = new WaterWave
            {
                HorizontalWavesCount = 10,
                HorizontalWavesAmplitude = 5,
                VerticalWavesCount = 3,
                VerticalWavesAmplitude = 15
            };
            break;

        case ImageProcessingFilters.Sepia:
            baseFilter = new Sepia();
            break;

        case ImageProcessingFilters.BrightnessCorrection:
            baseFilter = new BrightnessCorrection(-50);
            break;

        case ImageProcessingFilters.ContrastCorrection:
            baseFilter = new ContrastCorrection(15);
            break;

        case ImageProcessingFilters.SaturationCorrection1:
            baseFilter = new SaturationCorrection(-0.5f);
            break;

        case ImageProcessingFilters.SaturationCorrection2:
            baseFilter = new SaturationCorrection(-.25f);
            break;

        case ImageProcessingFilters.SaturationCorrection3:
            baseFilter = new SaturationCorrection(+0.5f);
            break;

        case ImageProcessingFilters.Invert:
            baseFilter = new Invert();
            break;

        case ImageProcessingFilters.Blur:
            baseFilter = new Blur();
            break;

        case ImageProcessingFilters.RotateChannels:
            baseFilter = new RotateChannels();
            break;

        case ImageProcessingFilters.RotateChannels2:
            // Rotating twice gives the second of the two possible rotations.
            baseFilter = new FiltersSequence(
                new RotateChannels(),
                new RotateChannels());
            break;

        case ImageProcessingFilters.AdditiveNoise:
            IRandomNumberGenerator generator = new UniformGenerator(new Range(-50, 50));
            baseFilter = new AdditiveNoise(generator);
            break;

        case ImageProcessingFilters.GammaCorrection:
            baseFilter = new GammaCorrection(0.5);
            break;

        case ImageProcessingFilters.HistogramEqualization:
            baseFilter = new HistogramEqualization();
            break;

        case ImageProcessingFilters.OrderedDithering:
            byte[,] matrix = new byte[4, 4]
            {
                { 95, 233, 127, 255 },
                { 159, 31, 191, 63 },
                { 111, 239, 79, 207 },
                { 175, 47, 143, 15 }
            };
            baseFilter = new FiltersSequence(
                new GrayscaleBT709(),
                new OrderedDithering(matrix));
            break;

        case ImageProcessingFilters.Pixallete:
            baseFilter = new Pixellate();
            break;

        case ImageProcessingFilters.SimplePosterization:
            baseFilter = new SimplePosterization();
            break;

        case ImageProcessingFilters.Texturer_Textile:
            baseFilter = new Texturer(new AForge.Imaging.Textures.TextileTexture(), 0.3, 0.7);
            break;

        case ImageProcessingFilters.Texturer_Cloud:
            baseFilter = new Texturer(new AForge.Imaging.Textures.CloudsTexture(), 0.3, 0.7);
            break;

        case ImageProcessingFilters.Texturer_Marble:
            baseFilter = new Texturer(new AForge.Imaging.Textures.MarbleTexture(), 0.3, 0.7);
            break;

        case ImageProcessingFilters.Texturer_Wood:
            baseFilter = new Texturer(new AForge.Imaging.Textures.WoodTexture(), 0.3, 0.7);
            break;

        case ImageProcessingFilters.Texturer_Labyrinth:
            baseFilter = new Texturer(new AForge.Imaging.Textures.LabyrinthTexture(), 0.3, 0.7);
            break;

        case ImageProcessingFilters.SobelEdgeDetector:
            baseFilter = new FiltersSequence(
                new ExtractChannel(RGB.R),
                new SobelEdgeDetector());
            break;

        case ImageProcessingFilters.SobelEdgeDetectorInvert:
            baseFilter = new FiltersSequence(
                new ExtractChannel(RGB.R),
                new SobelEdgeDetector(),
                new Invert());
            break;

        case ImageProcessingFilters.SobelEdgeDetectorSepia:
            // GrayscaleToRGB converts the 8bpp edge image back to 24bpp so
            // the Sepia filter can run on it.
            baseFilter = new FiltersSequence(
                new ExtractChannel(RGB.R),
                new SobelEdgeDetector(),
                new GrayscaleToRGB(),
                new Sepia());
            break;

        case ImageProcessingFilters.SobelEdgeDetectorSepiaCanvas:
            baseFilter = new FiltersSequence(
                new ExtractChannel(RGB.R),
                new SobelEdgeDetector(),
                new GrayscaleToRGB(),
                new Sepia(),
                new SimplePosterization(),
                new Texturer(new AForge.Imaging.Textures.TextileTexture(), 0.3, 0.7));
            break;

        case ImageProcessingFilters.Drawing:
            baseFilter = new FiltersSequence(
                new GrayscaleBT709(),
                new SobelEdgeDetector(),
                new Invert(),
                new SimplePosterization());
            break;

        case ImageProcessingFilters.DrawingSepia:
            baseFilter = new FiltersSequence(
                new GrayscaleBT709(),
                new SobelEdgeDetector(),
                new Invert(),
                new SimplePosterization(),
                new GrayscaleToRGB(),
                new Sepia());
            break;

        case ImageProcessingFilters.OilCanvas:
            baseFilter = new FiltersSequence(
                new SimplePosterization(),
                new Texturer(new AForge.Imaging.Textures.TextileTexture(), 0.3, 0.7));
            break;

        case ImageProcessingFilters.OilCanvasGray:
            baseFilter = new FiltersSequence(
                new SimplePosterization(),
                new Texturer(new AForge.Imaging.Textures.TextileTexture(), 0.3, 0.7),
                new GrayscaleBT709());
            break;

        case ImageProcessingFilters.OilCanvasSepia:
            baseFilter = new FiltersSequence(
                new SimplePosterization(),
                new Texturer(new AForge.Imaging.Textures.TextileTexture(), 0.3, 0.7),
                new Sepia());
            break;
    }

    if (baseFilter == null)
    {
        return (null);
    }
    return (ApplyFilter(imageBytes, baseFilter, format));
}
/// <summary>
/// Batch-processes every gesture image under <c>path</c> (one subdirectory per
/// class label): runs skin detection on each image, extracts the biggest blob
/// region, computes a 36-cell edge-density feature vector, appends it as a
/// labeled sample to D:\AI.txt, then retrains an SVM and predicts D:\test.txt.
/// </summary>
private void resultGestureToolStripMenuItem_Click(object sender, EventArgs e)
{
    int dir;
    int no;
    // One subdirectory per gesture class; 'no' becomes the class label (max 27).
    List <string> filedir = new List <string>(Directory.GetDirectories(path));
    for (dir = 0, no = 0; (dir < filedir.Count && no <= 26); dir++, no++)
    {
        string[] filePaths = Directory.GetFiles(filedir[dir].ToString());
        List <Bitmap> y = new List <Bitmap>();
        foreach (var iI in filePaths)
        {
            Bitmap Image = new Bitmap(iI);
            y.Add(Image);
        }
        foreach (Bitmap img in y)
        {
            pictureBox1.Image = img;
            srcImg = img;
            dstImg = img;
            Bitmap skin = new Bitmap(pictureBox1.Image);
            var rect = new Rectangle(0, 0, skin.Width, skin.Height);
            var data = skin.LockBits(rect, ImageLockMode.ReadWrite, skin.PixelFormat);
            var depth = Bitmap.GetPixelFormatSize(data.PixelFormat) / 8; //bytes per pixel
            // NOTE(review): buffer sizing assumes Stride == Width * depth (no row padding).
            var buffer = new byte[data.Width * data.Height * depth];
            //copy pixels to buffer
            Marshal.Copy(data.Scan0, buffer, 0, buffer.Length);
            // Skin-detect the four quadrants of the image in parallel.
            System.Threading.Tasks.Parallel.Invoke(
                () =>
            {
                //upper-left
                Process(buffer, 0, 0, data.Width / 2, data.Height / 2, data.Width, depth);
            },
                () =>
            {
                //upper-right
                Process(buffer, data.Width / 2, 0, data.Width, data.Height / 2, data.Width, depth);
            },
                () =>
            {
                //lower-left
                Process(buffer, 0, data.Height / 2, data.Width / 2, data.Height, data.Width, depth);
            },
                () =>
            {
                //lower-right
                Process(buffer, data.Width / 2, data.Height / 2, data.Width, data.Height, data.Width, depth);
            }
                );
            //Copy the buffer back to image
            Marshal.Copy(buffer, 0, data.Scan0, buffer.Length);
            skin.UnlockBits(data);
            pictureBox2.Image = skin;
            // Normalize both the source and the skin mask to 200x200.
            Bitmap src = new Bitmap(pictureBox1.Image);
            Bitmap res = new Bitmap(pictureBox2.Image);
            src = resize(src, new Size(200, 200));
            res = resize(res, new Size(200, 200));
            pictureBox1.Image = src;
            pictureBox2.Image = res;
            GrayscaleBT709 grayoject = new GrayscaleBT709();
            pictureBox2.Image = grayoject.Apply((Bitmap)pictureBox2.Image);
            Dilatation filter = new Dilatation();
            // apply the filter
            pictureBox2.Image = filter.Apply((Bitmap)pictureBox2.Image);
            ExtractBiggestBlob filter1 = new ExtractBiggestBlob();
            // NOTE(review): this applies the Dilatation filter a second time —
            // filter1 (ExtractBiggestBlob) is never applied, yet
            // filter1.BlobPosition is read on the next line. This looks like a
            // bug; confirm whether this line should be filter1.Apply(...).
            pictureBox2.Image = filter.Apply((Bitmap)pictureBox2.Image);
            blob = filter1.BlobPosition;
            Bitmap src1 = new Bitmap(pictureBox1.Image);
            Bitmap res1 = new Bitmap(pictureBox2.Image);
            Bitmap newBmp = new Bitmap(src1.Width, res1.Height, System.Drawing.Imaging.PixelFormat.Format32bppArgb);
            // Mask pass: keep the source pixel wherever the processed mask is
            // not near-black (any channel above 10); otherwise paint black.
            for (int i = 0; i < res1.Width; i++)
            {
                for (int j = 0; j < res1.Height; j++)
                {
                    System.Drawing.Color srcColor = src1.GetPixel(i + blob.X, j + blob.Y);
                    System.Drawing.Color dstColor = res1.GetPixel(i, j);
                    if (!(dstColor.R >= 0 && dstColor.R <= 10 && dstColor.G >= 0 && dstColor.G <= 10 && dstColor.B >= 0 && dstColor.B <= 10))
                    {
                        newBmp.SetPixel(i, j, srcColor);
                    }
                    else
                    {
                        newBmp.SetPixel(i, j, Color.Black);
                    }
                }
            }
            res1 = newBmp;
            pictureBox2.Image = newBmp;
            // Feature extraction: grayscale + Canny, then count edge pixels in
            // a 6x6 grid of 40x40 tiles (assumes a 240x240 working image).
            List <double> edgeCount = new List <double>();
            List <double> ratio = new List <double>();
            int pixelCount = 0;
            Bitmap Destimg = new Bitmap(pictureBox2.Image);
            GrayscaleBT709 go = new GrayscaleBT709();
            pictureBox2.Image = go.Apply((Bitmap)pictureBox2.Image);
            Destimg = go.Apply(Destimg);
            CannyEdgeDetector filter2 = new CannyEdgeDetector(0, 0, 1.4);
            pictureBox2.Image = filter2.Apply((Bitmap)pictureBox2.Image);
            Destimg = filter2.Apply(Destimg);
            var imgarray = new System.Drawing.Image[36];
            for (int i = 0; i < 6; i++)
            {
                for (int j = 0; j < 6; j++)
                {
                    pixelCount++;
                    var index = i * 6 + j;
                    imgarray[index] = new Bitmap(40, 40);
                    var graphics = Graphics.FromImage(imgarray[index]);
                    graphics.DrawImage(Destimg, new Rectangle(0, 0, 40, 40), new Rectangle(i * 40, j * 40, 40, 40), GraphicsUnit.Pixel);
                    graphics.Dispose();
                }
            }
            // Count the non-black (edge) pixels in each tile.
            for (int n = 0; n < 36; n++)
            {
                int counter = 0;
                Bitmap bufferImage = new Bitmap(imgarray[n]);
                for (int i = 0; i < 40; i++)
                {
                    for (int j = 0; j < 40; j++)
                    {
                        System.Drawing.Color hoefColor = bufferImage.GetPixel(i, j);
                        if (!(hoefColor.R == 0 && hoefColor.G == 0 && hoefColor.B == 0))
                        {
                            counter++;
                        }
                    }
                }
                edgeCount.Add(counter);
            }
            // Normalize tile counts into ratios of the total edge mass.
            double total = edgeCount.Sum();
            foreach (double x in edgeCount)
            {
                var a = (float)x / total;
                ratio.Add(a);
            }
            // Append this sample (label + 36 features) in libsvm "index:value" format.
            FileStream fs = new FileStream(@"D:\AI.txt", FileMode.Append, FileAccess.Write);
            StreamWriter sw = new StreamWriter(fs);
            sw.Write((no) + " ");
            for (int i = 0; i < ratio.Count; ++i)
            {
                sw.Write(i + ":" + ratio[i].ToString() + " ");
            }
            sw.WriteLine();
            sw.Close();
            fs.Close();
            // Retrain the SVM and re-predict after every single image
            // (expensive, but that is what the code does).
            Problem train = Problem.Read(@"D:\AI.txt");
            Problem test = Problem.Read(@"D:\test.txt");
            Parameter parameters = new Parameter();
            double C;
            double Gamma;
            parameters.C = 32;
            parameters.Gamma = 8;
            Model model = Training.Train(train, parameters);
            Prediction.Predict(test, @"D:\result.txt", model, false);
        }
    }
}
/// <summary>
/// Computes a 36-cell edge-density feature vector from the image in
/// pictureBox2 (grayscale + Canny, then per-tile edge pixel counts normalized
/// by the total) and appends it to D:\AI.txt in "index:value" format.
/// </summary>
private void hOGToolStripMenuItem_Click(object sender, EventArgs e)
{
    List <double> edgeCount = new List <double>();
    List <double> ratio = new List <double>();
    int pixelCount = 0;
    Bitmap Destimg = new Bitmap(pictureBox2.Image);
    // Grayscale then Canny edge-detect both the preview and the working copy.
    GrayscaleBT709 go = new GrayscaleBT709();
    pictureBox2.Image = go.Apply((Bitmap)pictureBox2.Image);
    Destimg = go.Apply(Destimg);
    CannyEdgeDetector filter = new CannyEdgeDetector(0, 0, 1.4);
    pictureBox2.Image = filter.Apply((Bitmap)pictureBox2.Image);
    Destimg = filter.Apply(Destimg);
    // Slice the edge image into a 6x6 grid of 40x40 tiles
    // (assumes a 240x240 working image).
    var imgarray = new System.Drawing.Image[36];
    for (int i = 0; i < 6; i++)
    {
        for (int j = 0; j < 6; j++)
        {
            pixelCount++;
            var index = i * 6 + j;
            imgarray[index] = new Bitmap(40, 40);
            var graphics = Graphics.FromImage(imgarray[index]);
            graphics.DrawImage(Destimg, new Rectangle(0, 0, 40, 40), new Rectangle(i * 40, j * 40, 40, 40), GraphicsUnit.Pixel);
            graphics.Dispose();
        }
    }
    // Count the non-black (edge) pixels in each tile.
    for (int n = 0; n < 36; n++)
    {
        int counter = 0;
        Bitmap bufferImage = new Bitmap(imgarray[n]);
        for (int i = 0; i < 40; i++)
        {
            for (int j = 0; j < 40; j++)
            {
                System.Drawing.Color hoefColor = bufferImage.GetPixel(i, j);
                if (!(hoefColor.R == 0 && hoefColor.G == 0 && hoefColor.B == 0))
                {
                    counter++;
                }
            }
        }
        edgeCount.Add(counter);
    }
    // Normalize the per-tile counts to ratios of the total edge mass.
    double total = edgeCount.Sum();
    foreach (double x in edgeCount)
    {
        var a = (float)x / total;
        ratio.Add(a);
    }
    // Append the feature vector in libsvm-style "index:value" format.
    FileStream fs = new FileStream(@"D:\AI.txt", FileMode.Append, FileAccess.Write);
    StreamWriter sw = new StreamWriter(fs);
    for (int i = 0; i < ratio.Count; ++i)
    {
        sw.Write(i + ":" + ratio[i].ToString() + " ");
    }
    sw.WriteLine();
    sw.Close();
    fs.Close();
}
/// <summary>Re-applies the BT.709 grayscale filter to the current preview image.</summary>
private void reapplyToolStripMenuItem7_Click(object sender, EventArgs e)
{
    var toGray = new GrayscaleBT709();
    pictureBox2.Image = toGray.Apply((Bitmap)pictureBox2.Image);
}
/// <summary>Binarizes the source image with Otsu thresholding (after BT.709 grayscale).</summary>
private void otsuToolStripMenuItem_Click(object sender, EventArgs e)
{
    var gri = new GrayscaleBT709();
    var otsu = new OtsuThreshold();
    islem = gri.Apply(kaynak);
    islem = otsu.Apply(islem);
    islemBox.Image = islem;
}
/// <summary>
/// Detects faces in the image with a Haar cascade, adds a 128x128 thumbnail of
/// each face (zoomed out by 50px) to imagesPanel, and outlines every detected
/// face on the image itself.
/// </summary>
/// <param name="image">The input image; modified in place with highlights.</param>
/// <returns>The same image with detected faces outlined.</returns>
private Bitmap ProcessImage(Bitmap image)
{
    // convert to grayscale
    var gray = new GrayscaleBT709();
    var frame = gray.Apply(image);

    // boost contrast
    var contrast = new ContrastCorrection();
    contrast.ApplyInPlace(frame);

    // set up a haar object detector to find faces
    var cascade = new Accord.Vision.Detection.Cascades.FaceHaarCascade();
    var detector = new HaarObjectDetector(cascade);
    detector.MinSize = new Size(150, 150);
    detector.ScalingFactor = 1.03f;
    detector.SearchMode = ObjectDetectorSearchMode.Average;
    detector.ScalingMode = ObjectDetectorScalingMode.SmallerToGreater;

    // scan the image
    var faces = detector.ProcessFrame(frame);

    // create thumbnails for each face
    var resizer = new ResizeNearestNeighbor(128, 128);
    foreach (var rect in faces)
    {
        // zoom out a bit
        var r = rect;
        r.Inflate(50, 50);

        // Clamp to the image bounds on all four sides — Bitmap.Clone throws
        // when the rectangle extends past the image, and the original only
        // clamped the top-left corner.
        r.Intersect(new Rectangle(0, 0, image.Width, image.Height));
        if (r.Width <= 0 || r.Height <= 0)
        {
            continue;
        }

        // copy the face into a new picturebox
        using (var face = image.Clone(r, PixelFormat.Format24bppRgb))
        {
            var smallFace = resizer.Apply(face);
            var thumbnail = new PictureBox()
            {
                Image = smallFace,
                SizeMode = PictureBoxSizeMode.Zoom,
                Width = 62,
                Height = 62
            };
            imagesPanel.Controls.Add(thumbnail);
        }
    }

    // highlight all detected faces in the main image
    using (Graphics g = Graphics.FromImage(image))
    using (var pen = new Pen(Color.LightGreen, 4))
    {
        foreach (var face in faces)
        {
            g.DrawRectangle(pen, face);
        }
    }
    return (image);
}
/// <summary>
/// Segments the thresholded image into character-like strips: finds column
/// (then row) positions containing black pixels, crops a strip per position,
/// and isolates the biggest blob in the last strip of each pass.
/// </summary>
private void segmentationToolStripMenuItem_Click(object sender, EventArgs e)
{
    // Grayscale then binarize the original image.
    Grayscale grayscale = new GrayscaleBT709();
    grayimage = grayscale.Apply(original);
    pictureBox2.Image = grayimage;
    Threshold bw = new Threshold();
    thresholdimage = bw.Apply(grayimage);
    pictureBox2.Image = thresholdimage;

    // Vertical-projection pass: collect the X positions where a new run of
    // columns containing black pixels begins (gap of more than 1 column).
    List <int> xC = new List <int> ();
    int xPrev = 0;
    for (int i = 0; i < thresholdimage.Width; i++)
    {
        count = 0;
        for (int j = 0; j < thresholdimage.Height; j++)
        {
            Color p = thresholdimage.GetPixel(i, j);
            if (p.R == 0 && p.G == 0 && p.B == 0)
            {
                count++;
            }
        }
        if (count != 0)
        {
            if (xC.Count == 0)
            {
                xC.Add(i);
            }
            else
            {
                if (i - xPrev > 1)
                {
                    xC.Add(i);
                }
            }
            xPrev = i;
        }
    }

    // Crop one vertical strip per detected start column.
    List <Bitmap> ni = new List <Bitmap>();
    for (int i = 0; i < xC.Count; i++)
    {
        int endbound;
        if (i + 1 >= xC.Count)
        {
            endbound = thresholdimage.Width;
        }
        else
        {
            endbound = xC[i + 1];
        }
        Crop cr = new Crop(new Rectangle(xC[i], 0, endbound - xC[i], thresholdimage.Height));
        newImage = cr.Apply(thresholdimage);
        ni.Add(newImage);
        pictureBox4.Image = newImage;
    }

    // Isolate the biggest blob in the LAST vertical strip:
    // invert (blobs must be white), extract, invert back.
    Invert inv = new Invert();
    invertimage = inv.Apply(newImage);
    pictureBox5.Image = invertimage;
    ExtractBiggestBlob ebb = new ExtractBiggestBlob();
    blobimage = ebb.Apply(invertimage);
    pictureBox5.Image = blobimage;
    Invert inv1 = new Invert();
    invertimage1 = inv1.Apply(blobimage);
    pictureBox5.Image = invertimage1;

    // Horizontal-projection pass (rows).
    // NOTE(review): unlike the X pass above, the "if (count != 0)" block here
    // sits INSIDE the inner column loop, so rows are recorded as soon as the
    // first black pixel is seen in the row. Confirm whether it was meant to be
    // outside the inner loop like the first pass.
    List <int> yC = new List <int>();
    int xPrev1 = 0;
    for (int j = 0; j < thresholdimage.Height; j++)
    {
        count = 0;
        for (int i = 0; i < thresholdimage.Width; i++)
        {
            Color p = thresholdimage.GetPixel(i, j);
            if (p.R == 0 && p.G == 0 && p.B == 0)
            {
                count++;
            }
            if (count != 0)
            {
                if (yC.Count == 0)
                {
                    yC.Add(j);
                }
                else
                {
                    if (j - xPrev1 > 1)
                    {
                        yC.Add(j);
                    }
                }
                xPrev1 = j;
            }
        }
    }

    // Crop one horizontal strip per detected start row.
    // NOTE(review): the crop width is thresholdimage.Height — this looks like
    // it should be thresholdimage.Width; confirm.
    List <Bitmap> ni1 = new List <Bitmap>();
    for (int j = 0; j < yC.Count; j++)
    {
        int endbound;
        if (j + 1 >= yC.Count)
        {
            endbound = thresholdimage.Height;
        }
        else
        {
            endbound = yC[j + 1];
        }
        Crop cr = new Crop(new Rectangle(0, yC[j], thresholdimage.Height, endbound - yC[j]));
        newImage3 = cr.Apply(thresholdimage);
        ni1.Add(newImage3);
        pictureBox3.Image = newImage3;
    }

    // Isolate the biggest blob in the LAST horizontal strip, as above.
    Invert inv2 = new Invert();
    invertimage3 = inv2.Apply(newImage3);
    pictureBox6.Image = invertimage3;
    ExtractBiggestBlob ebb1 = new ExtractBiggestBlob();
    blobimage1 = ebb1.Apply(invertimage3);
    pictureBox6.Image = blobimage1;
    Invert inv3 = new Invert();
    invertimage2 = inv3.Apply(blobimage1);
    pictureBox6.Image = invertimage2;
}
/// <summary>
/// Consumes a VisionMessage: preprocesses the frame (color remap, BT.709
/// grayscale, threshold), draws the current line-state estimates onto the
/// color frame, starts a background line search if one is not already
/// running, and republishes the (color) frame.
/// </summary>
/// <param name="message">Expected to be a VisionMessage carrying the frame.</param>
/// <param name="msgID">Message identifier (unused here).</param>
public override void consumeMessage(IMessage message, int msgID)
{
    VisionMessage vm = message as VisionMessage;
    Bitmap bitmap = vm.Bitmap;
    // rgb keeps a reference to the original color frame for drawing/output;
    // 'bitmap' becomes the preprocessed binary image used for line finding.
    Bitmap rgb = bitmap;
    bitmap = new ColorRemapping(redMap, greenMap, blueMap).Apply(bitmap);
    bitmap = new GrayscaleBT709().Apply(bitmap);
    bitmap = new Threshold(32).Apply(bitmap);
    // NOTE(review): this Graphics is never disposed; consider wrapping in using.
    Graphics g = Graphics.FromImage(rgb);
    if (g == null)
    {
        Console.WriteLine("Null Graphics!");
        return;
    }
    if (!threadRunning)
    {
        threadRunning = true;
        // Draw the three current line-state estimates (theta/r pairs) over the
        // vertical band covered by heightRes.
        for (int i = 0; i < 3; i++)
        {
            if (g != null && currentStateProbs[i] > double.MinValue)
            {
                drawLine(g, new HoughLine((double)currentThetaStates[i], (short)currentRStates[i], 0, 0d), new Rectangle(0, heightRes[0], bitmap.Width, (heightRes[heightRes.Length - 1] - heightRes[0]) * 2));
            }
        }
        // Run the expensive line search on a background thread; threadRunning
        // gates re-entry until that thread (presumably) clears it — confirm
        // findLines resets the flag.
        ThreadStart starter = delegate { findLines(bitmap); };
        Thread innerThread = new Thread(starter);
        innerThread.Start();
    }
    // Republish the annotated color frame.
    vm = new VisionMessage(rgb.Height, rgb.Width, ByteTools.pixelFormatToBPP(rgb.PixelFormat), rgb);
    msgService.sendMsg(vm);
}