/// <summary>
/// Update routine for the first camera. When <c>Executed</c> is set, runs blob
/// detection on an inverted copy of the frame, stores the detected rectangles and
/// their descriptors, and annotates the frame; otherwise draws two framing
/// rectangles as an idle indicator.
/// </summary>
/// <param name="sender">Event source (unused).</param>
/// <param name="image">Camera frame; filtered and drawn on in place.</param>
private void Camera1_Update(object sender, ref Bitmap image)
{
    if (Executed)
    {
        // FIX: invrt, font and brush were previously leaked every frame;
        // all GDI+ objects are now disposed deterministically.
        using (Bitmap invrt = FilterInvert.Apply(image))
        {
            FilterColor.ApplyInPlace(invrt);
            FilterSaturateLeft.ApplyInPlace(image);

            // Blob counter tuned to objects between 5% and 40% of the frame size.
            BlobCounter objCounter = new BlobCounter
            {
                FilterBlobs = true,
                CoupledSizeFiltering = true,
                MinWidth = (int)(0.05f * Wdth),
                MinHeight = (int)(0.05f * Hght),
                MaxWidth = (int)(0.4f * Wdth),
                MaxHeight = (int)(0.4f * Hght)
            };

            // Detection runs on the processed copy, not the displayed frame.
            objCounter.ProcessImage(invrt);
            ObjectsLeft = objCounter.GetObjectsRectangles();
            ObjectDescLeft = new Descriptor[ObjectsLeft.Length];

            using (Graphics graphic = Graphics.FromImage(image))
            using (Pen pen = new Pen(Color.Yellow) { Width = 7 })
            using (Font font = new Font("Courier", 14, FontStyle.Regular))
            using (SolidBrush brush = new SolidBrush(Color.Yellow))
            {
                for (int a = 0; a < ObjectsLeft.Length; a++)
                {
                    // Set descriptor of each detected object; the outline colour
                    // follows the object's mean colour.
                    SetDescriptor(ref ObjectDescLeft[a], ref ObjectsLeft[a], image);
                    pen.Color = ObjectDescLeft[a].ColorMean;
                    graphic.DrawRectangle(pen, ObjectsLeft[a]);
                    graphic.DrawString(a.ToString() + " | " + ObjectDescLeft[a].ColorClass + "\n"
                        + ObjectDescLeft[a].PercentR.ToString() + "%\n"
                        + ObjectDescLeft[a].PercentG.ToString() + "%\n"
                        + ObjectDescLeft[a].PercentB.ToString() + "%",
                        font, brush, ObjectsLeft[a].X, ObjectsLeft[a].Y);
                }
            }
        }
    }
    else
    {
        // Idle state: draw a thick outer and thin inner frame.
        using (Graphics graphic = Graphics.FromImage(image))
        using (Pen pen = new Pen(Color.Yellow))
        {
            pen.Width = 6;
            graphic.DrawRectangle(pen, new Rectangle(30, 30, Wdth - 60, Hght - 60));
            pen.Width = 3;
            graphic.DrawRectangle(pen, new Rectangle(90, 90, Wdth - 180, Hght - 180));
        }
    }
}
/// <summary>
/// Handles a new frame from the capture device: downsizes it to 320x240, applies
/// brightness, contrast and saturation correction (values taken from
/// <c>tempform</c>), and shows the result in <c>pictureBox1</c>. Only the first
/// frame is processed — later frames are dropped while <c>flag != 0</c>.
/// </summary>
/// <param name="sender">The video source raising the event.</param>
/// <param name="eventArgs">Event data carrying the captured frame.</param>
private void video_NewFrame(object sender, NewFrameEventArgs eventArgs) // get picture from camera
{
    // FIX: check the guard before cloning — the original cloned the frame
    // first and leaked the clone on every dropped frame.
    if (flag != 0)
    {
        return;
    }
    flag = 1;

    Bitmap resized;
    using (Bitmap frame = (Bitmap)eventArgs.Frame.Clone())
    {
        // FIX: the full-size clone was previously leaked when the variable
        // was rebound to the resized bitmap; it is now disposed here.
        resized = new Bitmap(frame, new Size(320, 240));
    }

    ContrastCorrection filter = new ContrastCorrection(tempform.contrast);
    BrightnessCorrection bfilter = new BrightnessCorrection(tempform.brightness);
    SaturationCorrection sfilter = new SaturationCorrection(tempform.saturation);
    bfilter.ApplyInPlace(resized);
    filter.ApplyInPlace(resized);
    sfilter.ApplyInPlace(resized);

    // NOTE(review): NewFrame is raised on the camera thread; assigning
    // pictureBox1.Image here matches the original code but is not marshalled
    // to the UI thread — confirm or route through Invoke.
    pictureBox1.Image = resized;
}
/// <summary>
/// Handles a new frame from the capture device: downsizes it to 320x240, applies
/// brightness, contrast and saturation correction, then pushes an independent
/// clone of the result into the PictureBox hosted on each tab page. Only the
/// first frame is processed — later frames are dropped while <c>flag != 0</c>.
/// </summary>
/// <param name="sender">The video source raising the event.</param>
/// <param name="eventArgs">Event data carrying the captured frame.</param>
private void video_NewFrame(object sender, NewFrameEventArgs eventArgs)
{
    // FIX: check the guard before cloning — the original cloned the frame
    // first and leaked the clone on every dropped frame.
    if (flag != 0)
    {
        return;
    }
    flag = 1;

    Bitmap resized;
    using (Bitmap frame = (Bitmap)eventArgs.Frame.Clone())
    {
        // FIX: the full-size clone was previously leaked when the variable
        // was rebound to the resized bitmap; it is now disposed here.
        resized = new Bitmap(frame, new Size(320, 240));
    }

    try
    {
        ContrastCorrection filter = new ContrastCorrection(contrast);
        BrightnessCorrection bfilter = new BrightnessCorrection(brightness);
        SaturationCorrection sfilter = new SaturationCorrection(saturation);
        bfilter.ApplyInPlace(resized);
        filter.ApplyInPlace(resized);
        sfilter.ApplyInPlace(resized);

        // Each tab gets its own clone so the PictureBoxes never share a bitmap.
        // NOTE(review): assumes tab j's control tree is [panel][panel][PictureBox]
        // as in the original code — confirm against the designer layout.
        for (int j = 0; j < methodtemp.getsettinglist().Count; j++)
        {
            ((PictureBox)tabControl1.Controls[j].Controls[0].Controls[0]).Image = (Bitmap)resized.Clone();
        }
    }
    finally
    {
        // FIX: only the per-tab clones are retained; the working bitmap
        // itself was previously leaked.
        resized.Dispose();
    }
}
/// <summary>Handles the Click event of the Process button which crops the main image and applies the filter stack to generate the output image.</summary>
/// <param name="sender">The source of the event.</param>
/// <param name="e">The <see cref="EventArgs"/> instance containing the event data.</param>
private void cmdProcess_Click(object sender, EventArgs e)
{
    if (!_placed)
    {
        return;
    }
    if (pcFullImage.Image == null)
    {
        return;
    }

    // Crop the selected region of the full image onto a white-backed bitmap
    // sized to the source picture box.
    Bitmap b = new Bitmap(pbSource.Width, pbSource.Height);
    using (Graphics g = Graphics.FromImage(b))
    {
        g.Clear(Color.White);
        g.DrawImage(pcFullImage.Image,
            new Rectangle(0, 0, pbSource.Width, pbSource.Height),
            _selection.X, _selection.Y, _selection.Width, _selection.Height,
            GraphicsUnit.Pixel);
    }
    pbSource.Image = b;

    if (filterStack.Count != 0)
    {
        b = filterStack.Apply(b);
    }

    // Saturation runs on the colour image, so checking b.PixelFormat is correct here.
    var sc = new SaturationCorrection();
    if (sc.FormatTranslations.ContainsKey(b.PixelFormat))
    {
        sc.AdjustValue = hsbSaturation.Value / 100.0f;
        // apply the filter
        sc.ApplyInPlace(b);
    }

    Bitmap greyb = null;
    if (AForge.Imaging.Filters.Grayscale.CommonAlgorithms.BT709.FormatTranslations.ContainsKey(b.PixelFormat))
    {
        greyb = AForge.Imaging.Filters.Grayscale.CommonAlgorithms.BT709.Apply(b);
    }
    else
    {
        this.Text = "Cannot convert to greyscale";
        greyb = b;
    }

    // FIX: the following filters are applied to greyb, so the pixel-format
    // support checks must inspect greyb.PixelFormat — after grayscale
    // conversion its format differs from b's, and the old checks guarded
    // the wrong image.
    var bc = new BrightnessCorrection();
    if (bc.FormatTranslations.ContainsKey(greyb.PixelFormat))
    {
        bc.AdjustValue = hsbBrightness.Value;
        bc.ApplyInPlace(greyb);
    }
    var cc = new ContrastCorrection();
    if (cc.FormatTranslations.ContainsKey(greyb.PixelFormat))
    {
        cc.Factor = hsbContrast.Value;
        cc.ApplyInPlace(greyb);
    }
    if (filterStack.Count == 0)
    {
        // Sharpen only when no custom filter stack already processed the image.
        var sharpen = new Sharpen();
        if (sharpen.FormatTranslations.ContainsKey(greyb.PixelFormat))
        {
            sharpen.ApplyInPlace(greyb);
        }
    }
    if (chkInvert.Checked)
    {
        var invert = new Invert();
        invert.ApplyInPlace(greyb);
    }
    pbInt.Image = greyb;
    b = greyb;

    // Dither with the user-selected algorithm, falling back to Sierra.
    // (The redundant `is` check followed by `as` was collapsed into one cast.)
    BaseInPlacePartialFilter filter = cmbAlgorithm.SelectedItem as AForge.Imaging.Filters.BaseInPlacePartialFilter;
    if (filter == null)
    {
        filter = new AForge.Imaging.Filters.SierraDithering();
    }
    if (filter.FormatTranslations.ContainsKey(b.PixelFormat))
    {
        var ditheredb = filter.Apply(b);
        pbDest.Image = ditheredb;
        this.Text = "Badger!";
    }
    else
    {
        this.Text = "Cannot dither this image!";
    }
}
public static Image ApplyImageProperties(byte[] blobContent, ImageProperties properties) { Bitmap image = null; try { using (var ms = new MemoryStream(blobContent)) { image = (Bitmap)System.Drawing.Image.FromStream(ms, false, false); image = AForge.Imaging.Image.Clone(image, PixelFormat.Format24bppRgb); if (properties.Crop != null) { AForge.Imaging.Filters.Crop filter = new AForge.Imaging.Filters.Crop(new Rectangle(properties.Crop.XOffset, properties.Crop.YOffset, properties.Crop.CropWidth, properties.Crop.CropHeight)); image = filter.Apply(image); } if (properties.ImageWidth != properties.OriginalWidth || properties.ImageHeight != properties.OriginalHeight) { var filter = new ResizeBicubic(properties.ImageWidth, properties.ImageHeight); image = filter.Apply(image); } if (properties.Colors != null) { if (properties.Colors.TransparentColor != null) { image.MakeTransparent(ColorTranslator.FromHtml("#" + properties.Colors.TransparentColor)); } var brightness = properties.Colors.Brightness; var bfilter = new BrightnessCorrection(brightness); bfilter.ApplyInPlace(image); var contrast = properties.Colors.Contrast; var cfilter = new ContrastCorrection(contrast); cfilter.ApplyInPlace(image); if (properties.Colors.Hue != 0) { var hue = properties.Colors.Hue; HueModifier filter = new HueModifier(hue); filter.ApplyInPlace(image); } var saturation = properties.Colors.Saturation; var sfilter = new SaturationCorrection(saturation * 0.01f); sfilter.ApplyInPlace(image); } # region Effects if (!String.IsNullOrEmpty(properties.Effects)) { var effects = properties.Effects.Split(';'); foreach (var item in effects) { switch (item) { case "Grayscale": var g = new Grayscale(0.2125, 0.7154, 0.0721); image = g.Apply(image); break; case "Sepia": var s = new Sepia(); image = AForge.Imaging.Image.Clone(image, PixelFormat.Format24bppRgb); s.ApplyInPlace(image); break; case "Rotate Channels": image = AForge.Imaging.Image.Clone(image, PixelFormat.Format24bppRgb); var r = new RotateChannels(); 
r.ApplyInPlace(image); break; case "Invert": var i = new Invert(); i.ApplyInPlace(image); break; case "Blur": var b = new Blur(); b.ApplyInPlace(image); break; case "Gaussian Blur": var gb = new GaussianBlur(4, 11); gb.ApplyInPlace(image); break; case "Convolution": int[,] kernel = { { -2, -1, 0 }, { -1, 1, 1 }, { 0, 1, 2 } }; var c = new Convolution(kernel); c.ApplyInPlace(image); break; case "Edges": var e = new Edges(); e.ApplyInPlace(image); break; } } } # endregion } } catch (Exception) {