private void SephiaToolStripMenuItem1_Click(object sender, EventArgs e)
{
    Sepia re = new Sepia();
    Bitmap s2 = re.Apply(orImg);
    pictureBox4.Image = s2;
}
public Bitmap ToSepia(Bitmap Im)
{
    Sepia Img = new Sepia();
    Bitmap bmImage = AForge.Imaging.Image.Clone(new Bitmap(Im), PixelFormat.Format24bppRgb);
    return Img.Apply(bmImage);
}
private async void sepiaToolStripMenuItem_Click(object sender, EventArgs e)
{
    AForge.Imaging.Filters.Sepia sepia = new Sepia();
    Bitmap newImageBitmap = await filterBitmap(sepia);
    currentImage = newImageBitmap;
    pictureBox.Image = currentImage;
}
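// The awaited filterBitmap helper above is not part of this snippet. A minimal sketch of how
// such a helper could look (an assumption, not the original implementation): clone the current
// image to 24 bpp and run the AForge filter on a worker thread so the UI thread is not blocked.
// Requires System.Threading.Tasks and the AForge.Imaging namespaces.
private Task<Bitmap> filterBitmap(IFilter filter)
{
    // Clone to 24 bpp first; AForge color filters such as Sepia expect RGB input.
    Bitmap source = AForge.Imaging.Image.Clone(new Bitmap(currentImage), PixelFormat.Format24bppRgb);
    return Task.Run(() => filter.Apply(source));
}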
private void filtr2ToolStripMenuItem_Click(object sender, EventArgs e)
{
    Sepia sepia = new Sepia();
    drawArea = sepia.Apply(drawArea);
    pbImage.Image = drawArea;
    reset();
}
private void process_button_Click(object sender, EventArgs e)
{
    // Changes are made here: a quick way to give the image a classic, brownish look.
    var sepiafilter = new Sepia();
    var sepiaoutputimage = sepiafilter.Apply(_inputImage);
    pictureBoxOutput.Image = sepiaoutputimage;
}
void SepiaToolStripMenuItemClick(object sender, EventArgs e)
{
    // If no image is loaded, do nothing.
    if (gambar == null)
    {
        return;
    }

    // Create the filter by instantiating the Sepia class.
    Sepia sepia = new Sepia();

    // Clone the source image (gambar) into gambar2.
    gambar2 = (Bitmap)gambar.Clone();

    // Apply the sepia filter to gambar2 in place.
    sepia.ApplyInPlace(gambar2);

    // Show the filtered result in pictureBox2.
    pictureBox2.Image = gambar2;
}
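// Apply() allocates and returns a new bitmap, while ApplyInPlace() overwrites the bitmap it is
// given, which is why the handler above clones the source first. A standalone sketch of the two
// variants ("input.jpg" is a placeholder path, assumed to load as a 24 bpp color image):
Bitmap original = AForge.Imaging.Image.Clone(new Bitmap("input.jpg"), PixelFormat.Format24bppRgb);
Sepia sepia = new Sepia();
Bitmap toned = sepia.Apply(original);   // returns a new bitmap, original left untouched
sepia.ApplyInPlace(original);           // original itself is now sepia-toned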
// New frame event handler, which is invoked on each new available video frame
private void video_NewFrame(object sender, NewFrameEventArgs eventArgs)
{
    Invoke((MethodInvoker)delegate()
    {
        optChose = comboBox2.SelectedIndex;
    });

    Bitmap bmp = (Bitmap)eventArgs.Frame.Clone();
    Bitmap bmpPrc = (Bitmap)eventArgs.Frame.Clone();

    switch (optChose)
    {
        case 0:
            Grayscale filter0 = new Grayscale(0.2125, 0.7154, 0.0721);
            Bitmap grayImage = filter0.Apply(bmpPrc);
            OtsuThreshold filter1 = new OtsuThreshold();
            pictureBox1.Image = bmp;
            pictureBox2.Image = filter1.Apply(grayImage);
            break;
        case 1:
            Sepia filter2 = new Sepia();
            pictureBox1.Image = bmp;
            pictureBox2.Image = filter2.Apply(bmpPrc);
            break;
        case 2:
            ChannelFiltering filter3 = new ChannelFiltering();
            filter3.Red = new IntRange(0, 255);
            filter3.Green = new IntRange(100, 255);
            filter3.Blue = new IntRange(100, 255);
            pictureBox1.Image = bmp;
            pictureBox2.Image = filter3.Apply(bmpPrc);
            break;
        case 3:
            HSLFiltering filter4 = new HSLFiltering();
            filter4.Hue = new IntRange(340, 20);
            filter4.UpdateLuminance = false;
            filter4.UpdateHue = false;
            pictureBox1.Image = bmp;
            pictureBox2.Image = filter4.Apply(bmpPrc);
            break;
    }
}
// Start is called before the first frame update
public override void Start()
{
    base.Start();

    // If the player has the monocle, vision-based quirks don't kick in
    if (Game.instance.playerStats.IsItemEquipped<Monocle>())
    {
        return;
    }

    mSepia = ScriptableObject.CreateInstance<Sepia>();
    mSepia.enabled.Override(true);
    mVolume = PostProcessManager.instance.QuickVolume(LayerMask.NameToLayer("VFXVolume"), 100f, mSepia);
    VFXVolumeCreated();
    mTumbleweed = Game.instance.cinematicDirector.objectMap.GetObjectByName("tumbleweed");
}
private void video_NewFrame(object sender, AForge.Video.NewFrameEventArgs eventArgs)
{
    // Grab the current frame and show it unprocessed.
    camera = (Bitmap)eventArgs.Frame.Clone();
    videoPlayer.Image = camera;
    Thread.Sleep(10);

    // Sepia-toned version of the frame.
    Sepia sp = new Sepia();
    videoPlayer.Image = sp.Apply(camera);
    Thread.Sleep(10);

    // Inverted version of the frame.
    Invert i = new Invert();
    videoPlayer.Image = i.Apply(camera);
    Thread.Sleep(10);

    // Grayscale (BT.709 coefficients) version of the frame.
    Grayscale g = new Grayscale(0.2125, 0.7154, 0.0721);
    videoPlayer.Image = g.Apply(camera);
    Thread.Sleep(10);
}
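// Note that the handler above applies four filters per frame and sleeps in between, so the
// capture thread is blocked and only the last assignment to videoPlayer.Image remains visible.
// A leaner, hypothetical variant (not part of the original sample) applies a single filter
// per frame:
private void video_NewFrame_SepiaOnly(object sender, AForge.Video.NewFrameEventArgs eventArgs)
{
    Bitmap frame = (Bitmap)eventArgs.Frame.Clone();
    Sepia sepia = new Sepia();
    videoPlayer.Image = sepia.Apply(frame);
}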
/// <summary>
/// This is the method that actually does the work.
/// </summary>
/// <param name="DA">The DA object is used to retrieve from inputs and store in outputs.</param>
protected override void SolveInstance(IGH_DataAccess DA)
{
    IGH_Goo goo = null;
    Image image = new Image();
    if (!DA.GetData(0, ref goo)) { return; }
    if (!goo.TryGetImage(ref image)) { return; }

    int mode = 0;
    DA.GetData(1, ref mode);

    double numVal = 0.5;
    DA.GetData(2, ref numVal);

    Filter filter = new Filter();
    switch ((FilterModes)mode)
    {
        case FilterModes.GrayWorld:
            SetParameter(2);
            filter = new GrayWorld();
            break;
        case FilterModes.Histogram:
            SetParameter(2);
            filter = new Histogram();
            break;
        case FilterModes.Invert:
            SetParameter(2);
            filter = new Invert();
            break;
        case FilterModes.Stretch:
            SetParameter(2);
            filter = new Stretch();
            break;
        case FilterModes.WhitePatch:
            SetParameter(2);
            filter = new WhitePatch();
            break;
        case FilterModes.Sepia:
            SetParameter(2);
            filter = new Sepia();
            break;
        case FilterModes.RGChromacity:
            SetParameter(2);
            filter = new RGChromacity();
            break;
        case FilterModes.Brightness:
            SetParameter(2, "V", "Adjust Value", "[0-1] Unitized adjustment value");
            filter = new Brightness(numVal);
            break;
        case FilterModes.Contrast:
            SetParameter(2, "V", "Factor Value", "[0-1] Unitized adjustment value");
            filter = new Contrast(numVal);
            break;
        case FilterModes.Gamma:
            SetParameter(2, "V", "Gamma Value", "[0-1] Unitized adjustment value");
            filter = new Gamma(numVal);
            break;
        case FilterModes.Hue:
            SetParameter(2, "V", "Hue Value", "[0-1] Unitized adjustment value");
            filter = new Hue(numVal);
            break;
        case FilterModes.Saturation:
            SetParameter(2, "V", "Adjust Value", "[0-1] Unitized adjustment value");
            filter = new Saturation(numVal);
            break;
    }

    message = ((FilterModes)mode).ToString();
    UpdateMessage();

    image.Filters.Add(filter);

    DA.SetData(0, image);
    DA.SetData(1, filter);
}
public static Bitmap ToSepia(this Bitmap bitmap)
{
    var sepiaFilter = new Sepia();
    return sepiaFilter.Apply(AForge.Imaging.Image.Clone(bitmap, PixelFormat.Format24bppRgb));
}
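// Sepia expects an RGB color bitmap, which is why the extension above clones the input into
// Format24bppRgb before applying the filter. Usage sketch ("photo.png" and "photo-sepia.png"
// are placeholder paths):
using (var source = new Bitmap("photo.png"))
using (Bitmap toned = source.ToSepia())
{
    toned.Save("photo-sepia.png");
}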
public Bitmap SepiaFilter(Bitmap img)
{
    Sepia sepia = new Sepia();
    return sepia.Apply(img);
}
private void sepiaToolStripMenuItem_Click_1(object sender, EventArgs e)
{
    Sepia sepiaobject = new Sepia();
    pictureBox2.Image = sepiaobject.Apply((Bitmap)pictureBox1.Image);
}
/// <summary>
/// This is the method that actually does the work.
/// </summary>
/// <param name="DA">The DA object can be used to retrieve data from input parameters and
/// to store data in output parameters.</param>
protected override void SolveInstance(IGH_DataAccess DA)
{
    Bitmap sourceImage = null;
    DA.GetData(0, ref sourceImage);

    string filter = "";
    DA.GetData(1, ref filter);

    sourceImage = ImageUtilities.convert(sourceImage, System.Drawing.Imaging.PixelFormat.Format32bppArgb);

    IFilter myFilter;
    Bitmap filteredImage = sourceImage;

    //Grayscale.CommonAlgorithms.Y.Apply
    switch (filter)
    {
        case "Greyscale":
            Console.Write("Applying: " + filter);
            sourceImage = Grayscale.CommonAlgorithms.RMY.Apply(sourceImage);
            filteredImage = sourceImage;
            break;
        case "Sepia":
            Console.Write("Applying: " + filter);
            myFilter = new Sepia();
            filteredImage = myFilter.Apply(sourceImage);
            break;
        case "Invert":
            Console.Write("Applying: " + filter);
            sourceImage = ImageUtilities.convert(sourceImage, System.Drawing.Imaging.PixelFormat.Format24bppRgb);
            myFilter = new Invert();
            filteredImage = myFilter.Apply(sourceImage);
            break;
        case "RotateChannel":
            Console.Write("Applying: " + filter);
            myFilter = new RotateChannels();
            filteredImage = myFilter.Apply(sourceImage);
            break;
        case "Threshold": //Need Extended Version
            Console.Write("Applying: " + filter);
            sourceImage = Grayscale.CommonAlgorithms.RMY.Apply(sourceImage);
            myFilter = new Threshold();
            filteredImage = myFilter.Apply(sourceImage);
            break;
        case "FloydFilter":
            Console.Write("Applying: " + filter);
            //sourceImage = Grayscale.CommonAlgorithms.RMY.Apply(sourceImage);
            //myFilter = new FloydSteinbergColorDithering();
            FloydSteinbergColorDithering myReduction = new FloydSteinbergColorDithering();
            filteredImage = myReduction.Apply(sourceImage);
            //filteredImage = myFilter.Apply(sourceImage);
            break;
        case "OrderedDithering":
            Console.Write("Applying: " + filter);
            sourceImage = Grayscale.CommonAlgorithms.RMY.Apply(sourceImage);
            myFilter = new OrderedDithering();
            filteredImage = myFilter.Apply(sourceImage);
            break;
        case "Sharpen":
            Console.Write("Applying: " + filter);
            myFilter = new Sharpen();
            filteredImage = myFilter.Apply(sourceImage);
            break;
        case "DifferenceEdgeDetector":
            Console.Write("Applying: " + filter);
            sourceImage = Grayscale.CommonAlgorithms.RMY.Apply(sourceImage);
            myFilter = new DifferenceEdgeDetector();
            filteredImage = myFilter.Apply(sourceImage);
            break;
        case "HomogenityEdgeDetector":
            Console.Write("Applying: " + filter);
            sourceImage = Grayscale.CommonAlgorithms.RMY.Apply(sourceImage);
            myFilter = new HomogenityEdgeDetector();
            filteredImage = myFilter.Apply(sourceImage);
            break;
        case "Sobel":
            Console.Write("Applying: " + filter);
            sourceImage = Grayscale.CommonAlgorithms.RMY.Apply(sourceImage);
            myFilter = new SobelEdgeDetector();
            filteredImage = myFilter.Apply(sourceImage);
            break;
        case "Jitter":
            Console.Write("Applying: " + filter);
            myFilter = new Jitter(); //Needs Expand
            filteredImage = myFilter.Apply(sourceImage);
            break;
        case "OilPainting":
            Console.Write("Applying: " + filter);
            myFilter = new OilPainting(); //Needs Expand
            filteredImage = myFilter.Apply(sourceImage);
            break;
        case "TextureFiltering":
            Console.Write("Applying: " + filter);
            sourceImage = ImageUtilities.convert(sourceImage, System.Drawing.Imaging.PixelFormat.Format24bppRgb);
            myFilter = new Texturer(new TextileTexture(), 1.0, 0.8); //Needs Expand
            filteredImage = myFilter.Apply(sourceImage);
            break;
        case "Median":
            Console.Write("Applying: " + filter);
            myFilter = new Median();
            filteredImage = myFilter.Apply(sourceImage);
            break;
        case "Mean":
            Console.Write("Applying: " + filter);
            myFilter = new Mean();
            filteredImage = myFilter.Apply(sourceImage);
            break;
        case "Blur": //Need Extended Version
            Console.Write("Applying: " + filter);
            myFilter = new GaussianBlur();
            filteredImage = myFilter.Apply(sourceImage);
            break;
        default:
            Console.Write("No Filter");
            break;
    }

    Console.Write(filteredImage.PixelFormat.ToString());
    Console.Write(sourceImage.PixelFormat.ToString());

    filteredImage = ImageUtilities.convert(filteredImage, System.Drawing.Imaging.PixelFormat.Format32bppArgb);
    DA.SetData(0, filteredImage);
}
public static Image ApplyImageProperties(byte[] blobContent, ImageProperties properties)
{
    Bitmap image = null;
    try
    {
        using (var ms = new MemoryStream(blobContent))
        {
            image = (Bitmap)System.Drawing.Image.FromStream(ms, false, false);
            image = AForge.Imaging.Image.Clone(image, PixelFormat.Format24bppRgb);

            if (properties.Crop != null)
            {
                AForge.Imaging.Filters.Crop filter = new AForge.Imaging.Filters.Crop(
                    new Rectangle(properties.Crop.XOffset, properties.Crop.YOffset,
                                  properties.Crop.CropWidth, properties.Crop.CropHeight));
                image = filter.Apply(image);
            }

            if (properties.ImageWidth != properties.OriginalWidth || properties.ImageHeight != properties.OriginalHeight)
            {
                var filter = new ResizeBicubic(properties.ImageWidth, properties.ImageHeight);
                image = filter.Apply(image);
            }

            if (properties.Colors != null)
            {
                if (properties.Colors.TransparentColor != null)
                {
                    image.MakeTransparent(ColorTranslator.FromHtml("#" + properties.Colors.TransparentColor));
                }

                var brightness = properties.Colors.Brightness;
                var bfilter = new BrightnessCorrection(brightness);
                bfilter.ApplyInPlace(image);

                var contrast = properties.Colors.Contrast;
                var cfilter = new ContrastCorrection(contrast);
                cfilter.ApplyInPlace(image);

                if (properties.Colors.Hue != 0)
                {
                    var hue = properties.Colors.Hue;
                    HueModifier filter = new HueModifier(hue);
                    filter.ApplyInPlace(image);
                }

                var saturation = properties.Colors.Saturation;
                var sfilter = new SaturationCorrection(saturation * 0.01f);
                sfilter.ApplyInPlace(image);
            }

            #region Effects
            if (!String.IsNullOrEmpty(properties.Effects))
            {
                var effects = properties.Effects.Split(';');
                foreach (var item in effects)
                {
                    switch (item)
                    {
                        case "Grayscale":
                            var g = new Grayscale(0.2125, 0.7154, 0.0721);
                            image = g.Apply(image);
                            break;
                        case "Sepia":
                            var s = new Sepia();
                            image = AForge.Imaging.Image.Clone(image, PixelFormat.Format24bppRgb);
                            s.ApplyInPlace(image);
                            break;
                        case "Rotate Channels":
                            image = AForge.Imaging.Image.Clone(image, PixelFormat.Format24bppRgb);
                            var r = new RotateChannels();
                            r.ApplyInPlace(image);
                            break;
                        case "Invert":
                            var i = new Invert();
                            i.ApplyInPlace(image);
                            break;
                        case "Blur":
                            var b = new Blur();
                            b.ApplyInPlace(image);
                            break;
                        case "Gaussian Blur":
                            var gb = new GaussianBlur(4, 11);
                            gb.ApplyInPlace(image);
                            break;
                        case "Convolution":
                            int[,] kernel = { { -2, -1, 0 }, { -1, 1, 1 }, { 0, 1, 2 } };
                            var c = new Convolution(kernel);
                            c.ApplyInPlace(image);
                            break;
                        case "Edges":
                            var e = new Edges();
                            e.ApplyInPlace(image);
                            break;
                    }
                }
            }
            #endregion
        }
    }
    catch (Exception)
    {
        // The original snippet is truncated at this point; a minimal completion is assumed here,
        // swallowing the exception and returning whatever image has been produced so far.
    }

    return image;
}
/// <summary>
/// Applies sepia toning to the image.
/// </summary>
/// <param name="source">The image this method extends.</param>
/// <param name="rectangle">
/// The <see cref="Rectangle"/> structure that specifies the portion of the image object to alter.
/// </param>
/// <param name="progressHandler">A delegate which is called as progress is made processing the image.</param>
/// <returns>The <see cref="Image"/>.</returns>
public static Image Sepia(this Image source, Rectangle rectangle, ProgressEventHandler progressHandler = null)
{
    Sepia processor = new Sepia();
    processor.OnProgress += progressHandler;

    try
    {
        return source.Process(rectangle, processor);
    }
    finally
    {
        processor.OnProgress -= progressHandler;
    }
}
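// The try/finally above guarantees the progress handler is detached even if Process throws.
// The same subscribe/process/unsubscribe pattern in isolation (processor, Run, and input are
// placeholder names for any event-publishing processor, not identifiers from this library):
processor.OnProgress += progressHandler;
try
{
    return processor.Run(input);
}
finally
{
    processor.OnProgress -= progressHandler;   // always unsubscribe, even on failure
}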
public SepiaFilter() { _sepia = new Sepia(); }
public static Bitmap FilterImage(Bitmap img, int filter)
{
    Bitmap sourceImage = img;
    sourceImage = ImageUtil.convert(sourceImage, System.Drawing.Imaging.PixelFormat.Format32bppArgb);

    IFilter myFilter;
    Bitmap filteredImage = sourceImage;

    if (filter == Filters.filters["Greyscale"])
    {
        sourceImage = Grayscale.CommonAlgorithms.RMY.Apply(sourceImage);
        filteredImage = sourceImage;
    }
    else if (filter == Filters.filters["Sepia"])
    {
        myFilter = new Sepia();
        filteredImage = myFilter.Apply(sourceImage);
    }
    else if (filter == Filters.filters["Invert"])
    {
        sourceImage = ImageUtil.convert(sourceImage, System.Drawing.Imaging.PixelFormat.Format24bppRgb);
        myFilter = new Invert();
        filteredImage = myFilter.Apply(sourceImage);
    }
    else if (filter == Filters.filters["RotateChannel"])
    {
        myFilter = new RotateChannels();
        filteredImage = myFilter.Apply(sourceImage);
    }
    else if (filter == Filters.filters["Threshold"])
    {
        sourceImage = Grayscale.CommonAlgorithms.RMY.Apply(sourceImage);
        myFilter = new Threshold();
        filteredImage = myFilter.Apply(sourceImage);
    }
    else if (filter == Filters.filters["FloydFilter"])
    {
        FloydSteinbergColorDithering myReduction = new FloydSteinbergColorDithering();
        filteredImage = myReduction.Apply(sourceImage);
    }
    else if (filter == Filters.filters["OrderedDithering"])
    {
        sourceImage = Grayscale.CommonAlgorithms.RMY.Apply(sourceImage);
        myFilter = new OrderedDithering();
        filteredImage = myFilter.Apply(sourceImage);
    }
    else if (filter == Filters.filters["Sharpen"])
    {
        myFilter = new Sharpen();
        filteredImage = myFilter.Apply(sourceImage);
    }
    else if (filter == Filters.filters["DifferenceEdgeDetector"])
    {
        sourceImage = Grayscale.CommonAlgorithms.RMY.Apply(sourceImage);
        myFilter = new DifferenceEdgeDetector();
        filteredImage = myFilter.Apply(sourceImage);
    }
    else if (filter == Filters.filters["HomogenityEdgeDetector"])
    {
        sourceImage = Grayscale.CommonAlgorithms.RMY.Apply(sourceImage);
        myFilter = new HomogenityEdgeDetector();
        filteredImage = myFilter.Apply(sourceImage);
    }
    else if (filter == Filters.filters["Sobel"])
    {
        sourceImage = Grayscale.CommonAlgorithms.RMY.Apply(sourceImage);
        myFilter = new SobelEdgeDetector();
        filteredImage = myFilter.Apply(sourceImage);
    }
    else if (filter == Filters.filters["Jitter"])
    {
        myFilter = new Jitter(); //Needs Expand
        filteredImage = myFilter.Apply(sourceImage);
    }
    else if (filter == Filters.filters["OilPainting"])
    {
        sourceImage = ImageUtil.convert(sourceImage, System.Drawing.Imaging.PixelFormat.Format24bppRgb);
        myFilter = new OilPainting(); //Needs Expand
        filteredImage = myFilter.Apply(sourceImage);
    }
    else if (filter == Filters.filters["TextureFiltering"])
    {
        sourceImage = ImageUtil.convert(sourceImage, System.Drawing.Imaging.PixelFormat.Format24bppRgb);
        myFilter = new Texturer(new TextileTexture(), 1.0, 0.8); //Needs Expand
        filteredImage = myFilter.Apply(sourceImage);
    }
    else if (filter == Filters.filters["Median"])
    {
        sourceImage = ImageUtil.convert(sourceImage, System.Drawing.Imaging.PixelFormat.Format24bppRgb);
        myFilter = new Median();
        filteredImage = myFilter.Apply(sourceImage);
    }
    else if (filter == Filters.filters["Mean"])
    {
        myFilter = new Mean();
        filteredImage = myFilter.Apply(sourceImage);
    }
    else if (filter == Filters.filters["Blur"])
    {
        myFilter = new GaussianBlur();
        filteredImage = myFilter.Apply(sourceImage);
    }

    //Console.Write(filteredImage.PixelFormat.ToString());
    //Console.Write(sourceImage.PixelFormat.ToString());

    filteredImage = ImageUtil.convert(filteredImage, System.Drawing.Imaging.PixelFormat.Format32bppArgb);
    return filteredImage;
}
protected void Button_Click(object sender, EventArgs e)
{
    if (FileUpload1.HasFile)
    {
        FileUpload1.SaveAs(Server.MapPath("~/images/") + FileUpload1.FileName);
        Label5.Text = "File name: " + FileUpload1.PostedFile.FileName;

        String path;
        String pathURL;

        using (System.Drawing.Image tempImage = System.Drawing.Image.FromFile("C:\\inetpub\\wwwroot\\images\\" + FileUpload1.FileName))
        {
            // Create string to draw.
            String drawString = TextBox4.Text;

            // Create font and brush.
            Font drawFont = new Font("Impact", 100);
            SolidBrush drawBrush = new SolidBrush(Color.Red);

            // Create point for upper-left corner of drawing.
            PointF drawPoint = new PointF(75.0F, 25.0F);

            // Draw string to screen.
            using (Graphics g = Graphics.FromImage(tempImage))
            {
                g.DrawString(drawString, drawFont, drawBrush, drawPoint);
            }

            using (var m = new MemoryStream())
            {
                String time_stamp = Stopwatch.GetTimestamp().ToString();
                String uniqueName = TextBox5.Text;
                path = Server.MapPath("/images/") + uniqueName + time_stamp + FileUpload1.FileName;
                pathURL = "/images/" + uniqueName + time_stamp + FileUpload1.FileName;
                new Bitmap(tempImage, 300, 300).Save(path, System.Drawing.Imaging.ImageFormat.Jpeg);
                Image1.ImageUrl = pathURL;

                // create the filters
                Grayscale GrayFilter = new Grayscale(0.2125, 0.7154, 0.0721);
                Sepia SepiaFilter = new Sepia();
                OilPainting OilFilter = new OilPainting(15);

                // apply the selected filter
                Bitmap test = new Bitmap(tempImage, 300, 300);
                if (RadioButtonList1.SelectedIndex == 0)
                {
                    // no filter selected
                }
                else if (RadioButtonList1.SelectedIndex == 1)
                {
                    Bitmap sepiaImage = SepiaFilter.Apply(test);
                    sepiaImage.Save(path, System.Drawing.Imaging.ImageFormat.Jpeg);
                    Image1.ImageUrl = pathURL;
                }
                else if (RadioButtonList1.SelectedIndex == 2)
                {
                    Bitmap grayImage = GrayFilter.Apply(test);
                    grayImage.Save(path, System.Drawing.Imaging.ImageFormat.Jpeg);
                    Image1.ImageUrl = pathURL;
                }
                else if (RadioButtonList1.SelectedIndex == 3)
                {
                    Bitmap oilImage = OilFilter.Apply(test);
                    oilImage.Save(path, System.Drawing.Imaging.ImageFormat.Jpeg);
                    Image1.ImageUrl = pathURL;
                }
            }
        }

        //// This is where I submit the information to the database.
        string qemail = Request.QueryString["email"];
        string memepath = pathURL;
        string memetext = TextBox4.Text;
        string timestamp = Stopwatch.GetTimestamp().ToString();
        int baseLikes = 0;
        int uploaded = 0;

        string connectionString = "uid=myusername;server=192.168.12.136;port=3306;database=it210b;password=123456;";
        MySqlConnection connection = new MySqlConnection(connectionString);
        connection.Open();

        //// This is the command for grabbing the userID.
        MySqlCommand IDCommand = new MySqlCommand();
        IDCommand.CommandText = "Select userId From it210b.users WHERE email = @qemail";
        IDCommand.Parameters.AddWithValue("@qemail", qemail);
        IDCommand.Connection = connection;
        string userID = IDCommand.ExecuteScalar().ToString();

        //// This is where I insert the image info into the DB. You have to assign your command a connection to use.
        MySqlCommand command = new MySqlCommand();
        command.Connection = connection;
        command.CommandText = "Insert INTO it210b.images (imagePath, altText, userId, numLikes, uploaded) VALUES (@imagePath, @altText, @userId, @numLikes, @uploaded)";
        command.Prepare();
        command.Parameters.AddWithValue("@imagePath", memepath);
        command.Parameters.AddWithValue("@altText", memetext);
        command.Parameters.AddWithValue("@userId", userID);
        command.Parameters.AddWithValue("@numLikes", baseLikes);
        command.Parameters.AddWithValue("@uploaded", uploaded);
        command.ExecuteNonQuery();

        HtmlMeta meta = new HtmlMeta();
        meta.HttpEquiv = "Refresh";
        meta.Content = "2;url=http://192.168.12.136:1337/memes";
        this.Page.Controls.Add(meta);
    }
    else
    {
        Label5.Text = "No file found. Please select a file and try again.";
    }
}
public static byte[] ApplyFilter(byte[] imageBytes, ImageProcessingFilters filter, ImageFormat format = null)
{
    IFilter baseFilter = null;
    switch (filter)
    {
        case ImageProcessingFilters.Default:
            return imageBytes;
        case ImageProcessingFilters.GrayscaleBT709:
            baseFilter = new GrayscaleBT709();
            break;
        case ImageProcessingFilters.GrayscaleRMY:
            baseFilter = new GrayscaleRMY();
            break;
        case ImageProcessingFilters.GrayscaleY:
            baseFilter = new GrayscaleY();
            break;
        case ImageProcessingFilters.BayerFilter:
            baseFilter = new FiltersSequence();
            ((FiltersSequence)baseFilter).Add(new ExtractChannel(RGB.B));
            ((FiltersSequence)baseFilter).Add(new BayerFilter());
            break;
        /*
        case ImageProcessingFilters.ImageWarp:
            baseFilter = new ImageWarp(
            break;
        */
        case ImageProcessingFilters.Channel_Red:
            baseFilter = new ExtractChannel(RGB.R);
            break;
        case ImageProcessingFilters.Channel_Green:
            baseFilter = new ExtractChannel(RGB.G);
            break;
        case ImageProcessingFilters.Channel_Blue:
            baseFilter = new ExtractChannel(RGB.B);
            break;
        case ImageProcessingFilters.WaterWave:
            baseFilter = new WaterWave();
            ((WaterWave)baseFilter).HorizontalWavesCount = 10;
            ((WaterWave)baseFilter).HorizontalWavesAmplitude = 5;
            ((WaterWave)baseFilter).VerticalWavesCount = 3;
            ((WaterWave)baseFilter).VerticalWavesAmplitude = 15;
            break;
        case ImageProcessingFilters.Sepia:
            baseFilter = new Sepia();
            break;
        case ImageProcessingFilters.BrightnessCorrection:
            baseFilter = new BrightnessCorrection(-50);
            break;
        case ImageProcessingFilters.ContrastCorrection:
            baseFilter = new ContrastCorrection(15);
            break;
        case ImageProcessingFilters.SaturationCorrection1:
            baseFilter = new SaturationCorrection(-0.5f);
            break;
        case ImageProcessingFilters.SaturationCorrection2:
            baseFilter = new SaturationCorrection(-.25f);
            break;
        case ImageProcessingFilters.SaturationCorrection3:
            baseFilter = new SaturationCorrection(+0.5f);
            break;
        case ImageProcessingFilters.Invert:
            baseFilter = new Invert();
            break;
        case ImageProcessingFilters.Blur:
            baseFilter = new Blur();
            break;
        case ImageProcessingFilters.RotateChannels:
            baseFilter = new RotateChannels();
            break;
        case ImageProcessingFilters.RotateChannels2:
            baseFilter = new FiltersSequence();
            ((FiltersSequence)baseFilter).Add(new RotateChannels());
            ((FiltersSequence)baseFilter).Add(new RotateChannels());
            break;
        case ImageProcessingFilters.AdditiveNoise:
            IRandomNumberGenerator generator = new UniformGenerator(new Range(-50, 50));
            baseFilter = new AdditiveNoise(generator);
            break;
        case ImageProcessingFilters.GammaCorrection:
            baseFilter = new GammaCorrection(0.5);
            break;
        case ImageProcessingFilters.HistogramEqualization:
            baseFilter = new HistogramEqualization();
            break;
        case ImageProcessingFilters.OrderedDithering:
            byte[,] matrix = new byte[4, 4]
            {
                {  95, 233, 127, 255 },
                { 159,  31, 191,  63 },
                { 111, 239,  79, 207 },
                { 175,  47, 143,  15 }
            };
            baseFilter = new FiltersSequence();
            ((FiltersSequence)baseFilter).Add(new GrayscaleBT709());
            ((FiltersSequence)baseFilter).Add(new OrderedDithering(matrix));
            break;
        case ImageProcessingFilters.Pixallete:
            baseFilter = new Pixellate();
            break;
        case ImageProcessingFilters.SimplePosterization:
            baseFilter = new SimplePosterization();
            break;
        case ImageProcessingFilters.Texturer_Textile:
            baseFilter = new Texturer(new AForge.Imaging.Textures.TextileTexture(), 0.3, 0.7);
            break;
        case ImageProcessingFilters.Texturer_Cloud:
            baseFilter = new Texturer(new AForge.Imaging.Textures.CloudsTexture(), 0.3, 0.7);
            break;
        case ImageProcessingFilters.Texturer_Marble:
            baseFilter = new Texturer(new AForge.Imaging.Textures.MarbleTexture(), 0.3, 0.7);
            break;
        case ImageProcessingFilters.Texturer_Wood:
            baseFilter = new Texturer(new AForge.Imaging.Textures.WoodTexture(), 0.3, 0.7);
            break;
        case ImageProcessingFilters.Texturer_Labyrinth:
            baseFilter = new Texturer(new AForge.Imaging.Textures.LabyrinthTexture(), 0.3, 0.7);
            break;
        case ImageProcessingFilters.SobelEdgeDetector:
            baseFilter = new FiltersSequence();
            ((FiltersSequence)baseFilter).Add(new ExtractChannel(RGB.R));
            ((FiltersSequence)baseFilter).Add(new SobelEdgeDetector());
            break;
        case ImageProcessingFilters.SobelEdgeDetectorInvert:
            baseFilter = new FiltersSequence();
            ((FiltersSequence)baseFilter).Add(new ExtractChannel(RGB.R));
            ((FiltersSequence)baseFilter).Add(new SobelEdgeDetector());
            ((FiltersSequence)baseFilter).Add(new Invert());
            break;
        case ImageProcessingFilters.SobelEdgeDetectorSepia:
            baseFilter = new FiltersSequence();
            ((FiltersSequence)baseFilter).Add(new ExtractChannel(RGB.R));
            ((FiltersSequence)baseFilter).Add(new SobelEdgeDetector());
            ((FiltersSequence)baseFilter).Add(new GrayscaleToRGB());
            ((FiltersSequence)baseFilter).Add(new Sepia());
            break;
        case ImageProcessingFilters.SobelEdgeDetectorSepiaCanvas:
            baseFilter = new FiltersSequence();
            ((FiltersSequence)baseFilter).Add(new ExtractChannel(RGB.R));
            ((FiltersSequence)baseFilter).Add(new SobelEdgeDetector());
            ((FiltersSequence)baseFilter).Add(new GrayscaleToRGB());
            ((FiltersSequence)baseFilter).Add(new Sepia());
            ((FiltersSequence)baseFilter).Add(new SimplePosterization());
            ((FiltersSequence)baseFilter).Add(new Texturer(new AForge.Imaging.Textures.TextileTexture(), 0.3, 0.7));
            break;
        case ImageProcessingFilters.Drawing:
            baseFilter = new FiltersSequence();
            ((FiltersSequence)baseFilter).Add(new GrayscaleBT709());
            ((FiltersSequence)baseFilter).Add(new SobelEdgeDetector());
            ((FiltersSequence)baseFilter).Add(new Invert());
            ((FiltersSequence)baseFilter).Add(new SimplePosterization());
            break;
        case ImageProcessingFilters.DrawingSepia:
            baseFilter = new FiltersSequence();
            ((FiltersSequence)baseFilter).Add(new GrayscaleBT709());
            ((FiltersSequence)baseFilter).Add(new SobelEdgeDetector());
            ((FiltersSequence)baseFilter).Add(new Invert());
            ((FiltersSequence)baseFilter).Add(new SimplePosterization());
            ((FiltersSequence)baseFilter).Add(new GrayscaleToRGB());
            ((FiltersSequence)baseFilter).Add(new Sepia());
            break;
        case ImageProcessingFilters.OilCanvas:
            baseFilter = new FiltersSequence();
            ((FiltersSequence)baseFilter).Add(new SimplePosterization());
            ((FiltersSequence)baseFilter).Add(new Texturer(new AForge.Imaging.Textures.TextileTexture(), 0.3, 0.7));
            break;
        case ImageProcessingFilters.OilCanvasGray:
            baseFilter = new FiltersSequence();
            ((FiltersSequence)baseFilter).Add(new SimplePosterization());
            ((FiltersSequence)baseFilter).Add(new Texturer(new AForge.Imaging.Textures.TextileTexture(), 0.3, 0.7));
            ((FiltersSequence)baseFilter).Add(new GrayscaleBT709());
            break;
        case ImageProcessingFilters.OilCanvasSepia:
            baseFilter = new FiltersSequence();
            ((FiltersSequence)baseFilter).Add(new SimplePosterization());
            ((FiltersSequence)baseFilter).Add(new Texturer(new AForge.Imaging.Textures.TextileTexture(), 0.3, 0.7));
            ((FiltersSequence)baseFilter).Add(new Sepia());
            break;
    }

    if (baseFilter == null)
    {
        return null;
    }

    return ApplyFilter(imageBytes, baseFilter, format);
}
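// The composite cases above build pipelines with FiltersSequence. The same pattern in a
// standalone sketch, reproducing the DrawingSepia chain (sourceBitmap is assumed to be a
// 24 bpp RGB System.Drawing.Bitmap):
var drawingSepia = new FiltersSequence();
drawingSepia.Add(new GrayscaleBT709());      // to 8 bpp grayscale
drawingSepia.Add(new SobelEdgeDetector());   // edges on the grayscale image
drawingSepia.Add(new Invert());              // dark lines on a light background
drawingSepia.Add(new SimplePosterization());
drawingSepia.Add(new GrayscaleToRGB());      // back to 24 bpp so Sepia can run
drawingSepia.Add(new Sepia());
Bitmap stylized = drawingSepia.Apply(sourceBitmap);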
public static Bitmap ApplySepiaFilter(Bitmap input)
{
    Sepia sepiaFilter = new Sepia();
    return sepiaFilter.Apply(input);
}