Example #1
        public static void Pixellate(this Bitmap image, Rectangle rec)
        {
            Pixellate filter = new Pixellate();

            // apply the filter in place, only inside the given rectangle
            filter.ApplyInPlace(image, rec);
        }
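A minimal usage sketch for the extension method above; the file names and region are hypothetical, and the source is cloned to 24bpp RGB first (as the later examples do) so the filter receives a pixel format it supports:

        // Hypothetical usage of the Pixellate extension method above.
        // Cloning to 24bpp RGB mirrors the later examples and avoids handing
        // the filter an unsupported pixel format.
        Bitmap source = AForge.Imaging.Image.Clone(
            new Bitmap("photo.jpg"), PixelFormat.Format24bppRgb);   // hypothetical file
        source.Pixellate(new Rectangle(0, 0, 100, 100));            // modifies 'source' in place
        source.Save("photo_pixellated.jpg");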
Example #2
        private Bitmap XXX(Bitmap bmpBefore, Bitmap bmpAfter)
        {
            var filter = new Grayscale(0.2125, 0.7154, 0.0721);

            bmpBefore = filter.Apply(bmpBefore);
            bmpAfter  = filter.Apply(bmpAfter);

            // create filters
            var     differenceFilter = new Difference();
            IFilter thresholdFilter  = new Threshold(15);

            // set background frame as an overlay for the difference filter
            differenceFilter.OverlayImage = bmpBefore;
            // apply the filters
            Bitmap  tmp1          = differenceFilter.Apply(bmpAfter);
            Bitmap  tmp2          = thresholdFilter.Apply(tmp1);
            IFilter erosionFilter = new Erosion();
            // apply the filter
            Bitmap tmp3 = erosionFilter.Apply(tmp2);

            IFilter pixellateFilter = new Pixellate();
            // apply the filter
            Bitmap tmp4 = pixellateFilter.Apply(tmp3);

            return(tmp4);
        }
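The method above is a frame-difference pipeline (grayscale, difference, threshold, erosion, pixellate) that returns a pixellated motion mask. A hedged usage sketch, with hypothetical frame files and PictureBox, called from inside the same class:

        // Hypothetical usage: compare two consecutive frames and display
        // the pixellated motion mask produced by the pipeline above.
        Bitmap previousFrame = new Bitmap("frame_001.png");   // hypothetical files
        Bitmap currentFrame  = new Bitmap("frame_002.png");
        Bitmap motionMask    = XXX(previousFrame, currentFrame);
        pictureBoxMotion.Image = motionMask;                   // hypothetical PictureBox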
Example #3
        private void PixellateToolStripMenuItem_Click(object sender, EventArgs e)
        {
            Pixellate rs = new Pixellate();
            Bitmap    s3 = rs.Apply(orImg);

            pictureBox5.Image = s3;
        }
Example #4
        public Bitmap ToPixellate(Bitmap Im)
        {
            AForge.Imaging.Filters.Pixellate Img = new Pixellate();
            Bitmap bmImage = AForge.Imaging.Image.Clone(new Bitmap(Im), PixelFormat.Format24bppRgb);

            return(Img.Apply(bmImage));
        }
Example #5
        public VideoProcessor()
        {
            background = null;

            pixelateFilter           = new Pixellate();
            pixelateFilter.PixelSize = 10;

            differenceFilter = new Difference();
            thresholdFilter  = new Threshold(15);
            grayscaleFilter  = new Grayscale(0.2125, 0.7154, 0.0721);
            erosionFilter    = new Erosion();

            moveTowardsFilter = new MoveTowards();

            filters1 = new FiltersSequence();
            filters1.Add(pixelateFilter);
            filters1.Add(grayscaleFilter);

            filters2 = new FiltersSequence();

            filters2.Add(differenceFilter);
            filters2.Add(thresholdFilter);
            filters2.Add(erosionFilter);

            rat1 = new Tracker(640 / 2, 480 / 2, Color.Red);

            rat2 = new Tracker(400, 300, Color.Green);

            counter = 0;
        }
Example #6
        public mEffectPixelate(int pixelWidth, int pixelHeight)
        {
            BitmapType = mFilter.BitmapTypes.Rgb24bpp;

            PixelWidth  = pixelWidth;
            PixelHeight = pixelHeight;

            filter = new Pixellate(PixelWidth, PixelHeight);
        }
Example #7
        private void InitFilters()
        {
            L_brownFilter = new ColorFiltering();
            D_brownFilter = new ColorFiltering();

            L_brownFilter.Red   = new IntRange(125, 140);
            L_brownFilter.Green = new IntRange(95, 110);
            L_brownFilter.Blue  = new IntRange(110, 130);

            D_brownFilter.Red   = new IntRange(55, 85);
            D_brownFilter.Green = new IntRange(45, 75);
            D_brownFilter.Blue  = new IntRange(45, 75);


            blobFilter = new BlobsFiltering();
            blobFilter.CoupledSizeFiltering = true;
            blobFilter.MinWidth             = 70;
            blobFilter.MinHeight            = 70;

            diffFilter = new Difference();
            diffFilter.OverlayImage = back;

            thresholdFilter = new Threshold(40);

            erosionFilter = new Erosion();

            edgeFilter = new Edges();

            openFilter = new Opening();

            pixelFilter = new Pixellate();

            morphFilter = new Morph();
            morphFilter.SourcePercent = 0.9;

            towardsFilter          = new MoveTowards();
            towardsFilter.StepSize = 10;

            blobCounter = new BlobCounter();
            blobGrabber = new ExtractBiggestBlob();
        }
Example #8
        public static byte[] ApplyFilter(byte[] imageBytes, ImageProcessingFilters filter, ImageFormat format = null)
        {
            IFilter baseFilter = null;

            switch (filter)
            {
            case ImageProcessingFilters.Default:
                return(imageBytes);

            case ImageProcessingFilters.GrayscaleBT709:
                baseFilter = new GrayscaleBT709();
                break;

            case ImageProcessingFilters.GrayscaleRMY:
                baseFilter = new GrayscaleRMY();
                break;

            case ImageProcessingFilters.GrayscaleY:
                baseFilter = new GrayscaleY();
                break;

            case ImageProcessingFilters.BayerFilter:
                baseFilter = new FiltersSequence();
                ((FiltersSequence)baseFilter).Add(new ExtractChannel(RGB.B));
                ((FiltersSequence)baseFilter).Add(new BayerFilter());
                break;

            /*
             * case ImageProcessingFilters.ImageWarp:
             * baseFilter = new ImageWarp(
             * break;
             * */
            case ImageProcessingFilters.Channel_Red:
                baseFilter = new ExtractChannel(RGB.R);
                break;

            case ImageProcessingFilters.Channel_Green:
                baseFilter = new ExtractChannel(RGB.G);
                break;

            case ImageProcessingFilters.Channel_Blue:
                baseFilter = new ExtractChannel(RGB.B);
                break;

            case ImageProcessingFilters.WaterWave:
                baseFilter = new WaterWave();
                ((WaterWave)baseFilter).HorizontalWavesCount     = 10;
                ((WaterWave)baseFilter).HorizontalWavesAmplitude = 5;
                ((WaterWave)baseFilter).VerticalWavesCount       = 3;
                ((WaterWave)baseFilter).VerticalWavesAmplitude   = 15;
                break;

            case ImageProcessingFilters.Sepia:
                baseFilter = new Sepia();
                break;

            case ImageProcessingFilters.BrightnessCorrection:
                baseFilter = new BrightnessCorrection(-50);
                break;

            case ImageProcessingFilters.ContrastCorrection:
                baseFilter = new ContrastCorrection(15);
                break;

            case ImageProcessingFilters.SaturationCorrection1:
                baseFilter = new SaturationCorrection(-0.5f);
                break;

            case ImageProcessingFilters.SaturationCorrection2:
                baseFilter = new SaturationCorrection(-.25f);
                break;

            case ImageProcessingFilters.SaturationCorrection3:
                baseFilter = new SaturationCorrection(+0.5f);
                break;

            case ImageProcessingFilters.Invert:
                baseFilter = new Invert();
                break;

            case ImageProcessingFilters.Blur:
                baseFilter = new Blur();
                break;

            case ImageProcessingFilters.RotateChannels:
                baseFilter = new RotateChannels();
                break;

            case ImageProcessingFilters.RotateChannels2:
                baseFilter = new FiltersSequence();
                ((FiltersSequence)baseFilter).Add(new RotateChannels());
                ((FiltersSequence)baseFilter).Add(new RotateChannels());
                break;

            case ImageProcessingFilters.AdditiveNoise:
                IRandomNumberGenerator generator = new UniformGenerator(new Range(-50, 50));
                baseFilter = new AdditiveNoise(generator);
                break;

            case ImageProcessingFilters.GammaCorrection:
                baseFilter = new GammaCorrection(0.5);
                break;

            case ImageProcessingFilters.HistogramEqualization:
                baseFilter = new HistogramEqualization();
                break;

            case ImageProcessingFilters.OrderedDithering:
                byte[,] matrix = new byte[4, 4]
                {
                    { 95, 233, 127, 255 },
                    { 159, 31, 191, 63 },
                    { 111, 239, 79, 207 },
                    { 175, 47, 143, 15 }
                };
                baseFilter = new FiltersSequence();
                ((FiltersSequence)baseFilter).Add(new GrayscaleBT709());
                ((FiltersSequence)baseFilter).Add(new OrderedDithering(matrix));
                break;

            case ImageProcessingFilters.Pixallete:
                baseFilter = new Pixellate();
                break;

            case ImageProcessingFilters.SimplePosterization:
                baseFilter = new SimplePosterization();
                break;

            case ImageProcessingFilters.Texturer_Textile:
                baseFilter = new Texturer(new AForge.Imaging.Textures.TextileTexture(), 0.3, 0.7);
                break;

            case ImageProcessingFilters.Texturer_Cloud:
                baseFilter = new Texturer(new AForge.Imaging.Textures.CloudsTexture(), 0.3, 0.7);
                break;

            case ImageProcessingFilters.Texturer_Marble:
                baseFilter = new Texturer(new AForge.Imaging.Textures.MarbleTexture(), 0.3, 0.7);
                break;

            case ImageProcessingFilters.Texturer_Wood:
                baseFilter = new Texturer(new AForge.Imaging.Textures.WoodTexture(), 0.3, 0.7);
                break;

            case ImageProcessingFilters.Texturer_Labyrinth:
                baseFilter = new Texturer(new AForge.Imaging.Textures.LabyrinthTexture(), 0.3, 0.7);
                break;

            case ImageProcessingFilters.SobelEdgeDetector:
                baseFilter = new FiltersSequence();
                ((FiltersSequence)baseFilter).Add(new ExtractChannel(RGB.R));
                ((FiltersSequence)baseFilter).Add(new SobelEdgeDetector());
                break;

            case ImageProcessingFilters.SobelEdgeDetectorInvert:
                baseFilter = new FiltersSequence();
                ((FiltersSequence)baseFilter).Add(new ExtractChannel(RGB.R));
                ((FiltersSequence)baseFilter).Add(new SobelEdgeDetector());
                ((FiltersSequence)baseFilter).Add(new Invert());
                break;

            case ImageProcessingFilters.SobelEdgeDetectorSepia:
                baseFilter = new FiltersSequence();
                ((FiltersSequence)baseFilter).Add(new ExtractChannel(RGB.R));
                ((FiltersSequence)baseFilter).Add(new SobelEdgeDetector());
                ((FiltersSequence)baseFilter).Add(new GrayscaleToRGB());
                ((FiltersSequence)baseFilter).Add(new Sepia());
                break;

            case ImageProcessingFilters.SobelEdgeDetectorSepiaCanvas:
                baseFilter = new FiltersSequence();
                ((FiltersSequence)baseFilter).Add(new ExtractChannel(RGB.R));
                ((FiltersSequence)baseFilter).Add(new SobelEdgeDetector());
                ((FiltersSequence)baseFilter).Add(new GrayscaleToRGB());
                ((FiltersSequence)baseFilter).Add(new Sepia());
                ((FiltersSequence)baseFilter).Add(new SimplePosterization());
                ((FiltersSequence)baseFilter).Add(new Texturer(new AForge.Imaging.Textures.TextileTexture(), 0.3, 0.7));
                break;

            case ImageProcessingFilters.Drawing:
                baseFilter = new FiltersSequence();
                ((FiltersSequence)baseFilter).Add(new GrayscaleBT709());
                ((FiltersSequence)baseFilter).Add(new SobelEdgeDetector());
                ((FiltersSequence)baseFilter).Add(new Invert());
                ((FiltersSequence)baseFilter).Add(new SimplePosterization());
                break;

            case ImageProcessingFilters.DrawingSepia:
                baseFilter = new FiltersSequence();
                ((FiltersSequence)baseFilter).Add(new GrayscaleBT709());
                ((FiltersSequence)baseFilter).Add(new SobelEdgeDetector());
                ((FiltersSequence)baseFilter).Add(new Invert());
                ((FiltersSequence)baseFilter).Add(new SimplePosterization());
                ((FiltersSequence)baseFilter).Add(new GrayscaleToRGB());
                ((FiltersSequence)baseFilter).Add(new Sepia());
                break;

            case ImageProcessingFilters.OilCanvas:
                baseFilter = new FiltersSequence();
                ((FiltersSequence)baseFilter).Add(new SimplePosterization());
                ((FiltersSequence)baseFilter).Add(new Texturer(new AForge.Imaging.Textures.TextileTexture(), 0.3, 0.7));
                break;

            case ImageProcessingFilters.OilCanvasGray:
                baseFilter = new FiltersSequence();
                ((FiltersSequence)baseFilter).Add(new SimplePosterization());
                ((FiltersSequence)baseFilter).Add(new Texturer(new AForge.Imaging.Textures.TextileTexture(), 0.3, 0.7));
                ((FiltersSequence)baseFilter).Add(new GrayscaleBT709());
                break;

            case ImageProcessingFilters.OilCanvasSepia:
                baseFilter = new FiltersSequence();
                ((FiltersSequence)baseFilter).Add(new SimplePosterization());
                ((FiltersSequence)baseFilter).Add(new Texturer(new AForge.Imaging.Textures.TextileTexture(), 0.3, 0.7));
                ((FiltersSequence)baseFilter).Add(new Sepia());
                break;
            }

            if (baseFilter == null)
            {
                return(null);
            }

            return(ApplyFilter(imageBytes, baseFilter, format));
        }
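A hedged usage sketch for the ApplyFilter helper above; the file paths are hypothetical, and it exercises the Pixallete case of the switch via the IFilter-based overload the method delegates to:

        // Hypothetical usage: read an image as bytes, apply the Pixallete
        // case of the switch above, and save the result.
        byte[] input  = File.ReadAllBytes("input.jpg");        // hypothetical path
        byte[] output = ApplyFilter(input, ImageProcessingFilters.Pixallete, ImageFormat.Jpeg);
        if (output != null)
        {
            File.WriteAllBytes("output.jpg", output);           // hypothetical path
        }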
Example #9
        private void JpegLiveSource1LiveNotificationEvent(object sender, EventArgs e)
        {
            if (this.InvokeRequired)
            {
                if (OnMainThread)
                {
                    LiveContentEventArgs args = e as LiveContentEventArgs;
                    if (args != null && args.LiveContent != null)
                    {
                        // UI thread is too busy - discard this frame from display
                        args.LiveContent.Dispose();
                    }
                    return;
                }
                OnMainThread = true;
                // Make sure we execute on the UI thread before updating UI Controls
                BeginInvoke(new EventHandler(JpegLiveSource1LiveNotificationEvent), new[] { sender, e });
            }
            else
            {
                LiveContentEventArgs args = e as LiveContentEventArgs;
                if (args != null)
                {
                    if (args.LiveContent != null)
                    {
                        // Display the received JPEG
                        //textBoxLength.Text = "" + args.LiveContent.Content.Length;

                        int width  = args.LiveContent.Width;
                        int height = args.LiveContent.Height;

                        MemoryStream ms = new MemoryStream(args.LiveContent.Content);
                        //Bitmap newBitmap = testBox();
                        Bitmap newBitmap = new Bitmap(ms);

                        if (referenceBitmap == null)
                        {
                            referenceBitmap = newBitmap;
                        }

                        textBoxResolution.Text = "" + width + "x" + height;

                        if (pictureBoxOriginal.Size.Width != 0 && pictureBoxOriginal.Size.Height != 0)
                        {
                            if ((newBitmap.Width != pictureBoxOriginal.Width || newBitmap.Height != pictureBoxOriginal.Height))
                            {
                                pictureBoxOriginal.Image = new Bitmap(newBitmap, pictureBoxOriginal.Size);
                            }
                            else
                            {
                                pictureBoxOriginal.Image = newBitmap;
                            }
                        }

                        textBoxDecodingStatus.Text = args.LiveContent.HardwareDecodingStatus;



                        ms.Close();
                        ms.Dispose();

                        _count++;
                        textBoxCount.Text = "" + _count;

                        args.LiveContent.Dispose();



                        // Start processing the frame
                        grayImage            = gfilter.Apply(newBitmap);
                        pictureBoxGray.Image = grayImage;

                        try
                        {
                            if (counter == 4000)
                            {
                                counter    = 0;
                                background = null;
                                analyticsImageProcessing.resetBackground();
                            }

                            if (background == null)
                            {
                                background = analyticsImageProcessing.GetBackGound(grayImage);

                                // Show a "processing background" message
                                Bitmap   bitmap = new Bitmap(320, 240);
                                Graphics g      = Graphics.FromImage(bitmap);
                                g.FillRectangle(System.Drawing.Brushes.Black, 0, 0, bitmap.Width, bitmap.Height);
                                g.DrawString("Processing Background...", new Font(FontFamily.GenericMonospace, 12), Brushes.White, new PointF(20, pictureBoxOriginal.Height / 2 - 20));
                                g.Dispose();
                                pictureBoxBackgound.Image = new Bitmap(bitmap, pictureBoxOriginal.Size);
                                bitmap.Dispose();
                            }
                            else
                            {
                                pictureBoxBackgound.Image = background;
                                background2 = gfilter.Apply(background);

                                Bitmap backgroundMask = analyticsImageProcessing.diff(grayImage, background2);
                                pictureBox1.Image = backgroundMask;


                                // create filter
                                //   Median filter = new Median();
                                // apply the filter
                                //   Bitmap backgroundMask2 = filter.Apply(backgroundMask);

                                //     pictureBox2.Image = backgroundMask2;

                                // create filter
                                Pixellate pxfilter = new Pixellate(20);
                                // apply the filter
                                Bitmap result = pxfilter.Apply(backgroundMask);
                                //      pictureBox3.Image = result;

                                // create filter
                                Threshold thfilter = new Threshold(1);
                                // apply the filter
                                Bitmap result2 = thfilter.Apply(result);

                                pictureBox4.Image = result2;

                                ApplyMask appmask = new ApplyMask(result2);
                                foreground = appmask.Apply(newBitmap);
                            }


                            // Process image
                            if (foreground != null)
                            {
                                Blob[] blobs = analyticsImageProcessing.GetBlobs(foreground, blobCounter);
                                textBoxMetadata.Text = metadataHandler.SendMetadataBox(blobs, _jpegLiveSource.Width, _jpegLiveSource.Height);
                                PaintHeatMap(blobs);
                                pictureBoxHeatmap.Image = bitmapHeatMap;

                                // Debug tool


                                if (blobs.Length > 0 && blobs[0] != null)
                                {
                                    blobCounter.ExtractBlobsImage(foreground, blobs[0], false);
                                    pictureBoxBlob1.Image = blobs[0].Image.ToManagedImage();
                                    textBoxAreaBlob1.Text = blobs[0].Area.ToString();
                                    textBoxXBlob1.Text    = blobs[0].CenterOfGravity.X.ToString();
                                    textBoxYBlob1.Text    = blobs[0].CenterOfGravity.Y.ToString();
                                }
                                else
                                {
                                    pictureBoxBlob1.Image = null;
                                }

                                if (blobs.Length > 1 && blobs[1] != null)
                                {
                                    blobCounter.ExtractBlobsImage(foreground, blobs[1], false);
                                    pictureBoxBlob2.Image = blobs[1].Image.ToManagedImage();
                                    textBoxAreaBlob2.Text = blobs[1].Area.ToString();
                                    textBoxXBlob2.Text    = blobs[1].CenterOfGravity.X.ToString();
                                    textBoxYBlob2.Text    = blobs[1].CenterOfGravity.Y.ToString();
                                }
                                else
                                {
                                    pictureBoxBlob2.Image = null;
                                }

                                if (blobs.Length > 2 && blobs[2] != null)
                                {
                                    blobCounter.ExtractBlobsImage(foreground, blobs[2], false);
                                    pictureBoxBlob3.Image = blobs[2].Image.ToManagedImage();
                                    textBoxAreaBlob3.Text = blobs[2].Area.ToString();
                                    textBoxXBlob3.Text    = blobs[2].CenterOfGravity.X.ToString();
                                    textBoxYBlob3.Text    = blobs[2].CenterOfGravity.Y.ToString();
                                }
                                else
                                {
                                    pictureBoxBlob3.Image = null;
                                }

                                if (blobs.Length > 3 && blobs[3] != null)
                                {
                                    blobCounter.ExtractBlobsImage(foreground, blobs[3], false);
                                    pictureBoxBlob4.Image = blobs[3].Image.ToManagedImage();
                                    textBoxAreaBlob4.Text = blobs[3].Area.ToString();
                                    textBoxXBlob4.Text    = blobs[3].CenterOfGravity.X.ToString();
                                    textBoxYBlob4.Text    = blobs[3].CenterOfGravity.Y.ToString();
                                }
                                else
                                {
                                    pictureBoxBlob4.Image = null;
                                }
                            }
                        }
                        catch (Exception r)
                        {
                            Console.WriteLine(r.Message);
                        }
                    }
                    else if (args.Exception != null)
                    {
                        // Handle any exceptions that occurred inside the toolkit or in the communication with the VMS

                        Bitmap   bitmap = new Bitmap(320, 240);
                        Graphics g      = Graphics.FromImage(bitmap);
                        g.FillRectangle(System.Drawing.Brushes.Black, 0, 0, bitmap.Width, bitmap.Height);
                        g.DrawString("Connection lost to server ...", new Font(FontFamily.GenericMonospace, 12), Brushes.White, new PointF(20, pictureBoxOriginal.Height / 2 - 20));
                        g.Dispose();
                        pictureBoxOriginal.Image = new Bitmap(bitmap, pictureBoxOriginal.Size);
                        bitmap.Dispose();
                    }
                }
                OnMainThread = false;
            }
        }
Example #10
        public static Bitmap ToPixellate(this Bitmap iBitmap, int pixelSize)
        {
            Pixellate filter = new Pixellate(pixelSize);

            return(filter.Apply(iBitmap));
        }
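A hedged usage sketch for the ToPixellate extension above; the pixel size and file names are hypothetical, and Apply returns a new bitmap rather than modifying the source:

        // Hypothetical usage: create a pixellated copy with 8x8 blocks.
        Bitmap original   = AForge.Imaging.Image.Clone(
            new Bitmap("input.png"), PixelFormat.Format24bppRgb);  // hypothetical file
        Bitmap pixellated = original.ToPixellate(8);               // returns a new bitmap
        pixellated.Save("output.png");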
Example #11
        /// <summary>
        /// Once we have a context, we are able to perform various setup routines
        /// that require us to connect to OpenGL.
        /// </summary>
        protected override void OnSetContext()
        {
            //fCamera1 = VideoTexture.CreateFromDeviceIndex(GI, 0, 640, 480);
            fCamera1 = VideoTexture.CreateFromDeviceIndex(GI, 0, true);
        
            // Turn off features that we don't need; they
            // just slow down video processing
            GI.Features.AlphaTest.Disable();
            GI.Features.Blend.Disable();
            GI.Features.DepthTest.Disable();
            GI.Features.Dither.Disable();
            GI.Features.Fog.Disable();
            GI.Features.Lighting.Disable();

            // Create the FaceMesh that will receive the gray texture so we 
            // can break it up into 8x8 squares
            fFaceSize = new Vector3D(fCamera1.Width, fCamera1.Height, 0);

            fBackgroundCreator = new RGBToGray(GI, fCamera1.Width, fCamera1.Height);
            fBackgroundCopier = new UnaryTextureProcessor(GI, fCamera1.Width, fCamera1.Height);
            fBackgroundPixellator = new Pixellate(GI, fCamera1.Width, fCamera1.Height, 8);
            
            fImageOperator = new UnaryTextureProcessor(GI, fCamera1.Width, fCamera1.Height);
            fGrayConverter = new RGBToGray(GI, fCamera1.Width, fCamera1.Height);
            fGammaCorrection = new PowerLawTransform(GI, fCamera1.Width, fCamera1.Height, 1.0f);
            fLuminanceThreshold = new LuminanceThreshold(GI, fCamera1.Width, fCamera1.Height, 0f);
            fLuminanceBinarizer = new LuminanceBinarizer(GI, fCamera1.Width, fCamera1.Height, 0.3f);
            fLuminanceBinarizer.OverColor = ColorRGBA.Black;
            fLuminanceBinarizer.UnderColor = ColorRGBA.White;
            fPixellate = new Pixellate(GI, fCamera1.Width, fCamera1.Height,8);
            fBlocker = new BlockProcessor(GI, fCamera1.Width, fCamera1.Height, 2);
            fAverager = new AverageProcessor(GI, fCamera1.Width, fCamera1.Height);
            fMorpher = new Morph(GI, fCamera1.Width, fCamera1.Height);

            fEdgeEnhance = new ConvolutionProcessor(GI, fCamera1.Width, fCamera1.Height, ConvolutionKernel.EdgeEnhance);
            fEdgeEnhance.Distance = 4;
            fEmboss = new ConvolutionProcessor(GI, fCamera1.Width, fCamera1.Height, ConvolutionKernel.Emboss);
            fSoften = new ConvolutionProcessor(GI, fCamera1.Width, fCamera1.Height, ConvolutionKernel.GaussianBlur);
            fSoften.Distance = 4;
            fSobell = new ConvolutionProcessor(GI, fCamera1.Width, fCamera1.Height, ConvolutionKernel.Sobell);
            fLaplacian = new ConvolutionProcessor(GI, fCamera1.Width, fCamera1.Height, ConvolutionKernel.Laplacian);

            fDifference = new DifferenceProcessor(GI, fCamera1.Width, fCamera1.Height);



            // We want replace mode because we don't care about
            // the values that are currently in place; we just want
            // to replace them.
            GI.TexEnv(TextureEnvModeParam.Replace);

            fGrayMesh = new XYAxesMesh(GI, new Vector3D(fCamera1.Width, fCamera1.Height, 0), new Resolution(2, 2), null);
            fPointMesh = new XYAxesPointMesh(GI, new Vector3D(fCamera1.Width, fCamera1.Height, 0), new Resolution(fCamera1.Width/4, fCamera1.Height/4), null);
        }
Example #12
        public static Bitmap ToPixalation(this Bitmap bitmap)
        {
            var pixellateFilter = new Pixellate();

            return(pixellateFilter.Apply(AForge.Imaging.Image.Clone(bitmap, PixelFormat.Format24bppRgb)));
        }
Example #13
        /// <summary>
        /// This is the method that actually does the work.
        /// </summary>
        /// <param name="DA">The DA object is used to retrieve from inputs and store in outputs.</param>
        protected override void SolveInstance(IGH_DataAccess DA)
        {
            IGH_Goo goo   = null;
            Image   image = new Image();

            if (!DA.GetData(0, ref goo))
            {
                return;
            }
            if (!goo.TryGetImage(ref image))
            {
                return;
            }

            int mode = 0;

            DA.GetData(1, ref mode);

            double numValA = 0;

            DA.GetData(2, ref numValA);

            double numValB = 0;

            DA.GetData(3, ref numValB);

            Filter filter = new Filter();

            switch ((FilterModes)mode)
            {
            case FilterModes.Additive:
                SetParameter(2);
                SetParameter(3);
                filter = new Additive();
                image.Filters.Add(new Additive());
                break;

            case FilterModes.Daube:
                SetParameter(2, "S", "Size", "[0-1] Unitized adjustment value");
                SetParameter(3);
                filter = new Daube(numValA);
                image.Filters.Add(new Daube(numValA));
                break;

            case FilterModes.SaltPepper:
                SetParameter(2, "N", "Noise", "[0-1] Unitized adjustment value");
                SetParameter(3);
                filter = new SaltPepper(numValA);
                image.Filters.Add(new SaltPepper(numValA));
                break;

            case FilterModes.Jitter:
                SetParameter(2, "R", "Radius", "[0-1] Unitized adjustment value");
                SetParameter(3);
                filter = new Jitter(numValA);
                image.Filters.Add(new Jitter(numValA));
                break;

            case FilterModes.Kuwahara:
                SetParameter(2, "S", "Size", "[0-1] Unitized adjustment value");
                SetParameter(3);
                filter = new Kuwahara(numValA);
                image.Filters.Add(new Kuwahara(numValA));
                break;

            case FilterModes.Posterize:
                SetParameter(2, "I", "Interval", "[0-1] Unitized adjustment value");
                SetParameter(3);
                filter = new Posterize(numValA);
                image.Filters.Add(new Posterize(numValA));
                break;

            case FilterModes.GaussianBlur:
                SetParameter(2, "X", "Sigma", "[0-1] Unitized adjustment value");
                SetParameter(3, "S", "Size", "[0-1] Unitized adjustment value");
                filter = new GaussianBlur(numValA, numValB);
                image.Filters.Add(new GaussianBlur(numValA, numValB));
                break;

            case FilterModes.Pixellate:
                SetParameter(2, "W", "Width", "[0-1] Unitized adjustment value");
                SetParameter(3, "H", "Height", "[0-1] Unitized adjustment value");
                filter = new Pixellate(numValA, numValB);
                image.Filters.Add(new Pixellate(numValA, numValB));
                break;

            case FilterModes.Blur:
                SetParameter(2, "D", "Divisor", "[0-1] Unitized adjustment value");
                SetParameter(3, "T", "Threshold", "[0-1] Unitized adjustment value");
                filter = new Blur(numValA, numValB);
                image.Filters.Add(new Blur(numValA, numValB));
                break;
            }

            message = ((FilterModes)mode).ToString();
            UpdateMessage();

            DA.SetData(0, image);
            DA.SetData(1, filter);
        }