Extract RGB channel from image.

Extracts the specified channel of a color image and returns it as a grayscale image.

The filter accepts 24, 32, 48 and 64 bpp color images and produces an 8 bpp grayscale image (if the source is 24 or 32 bpp) or a 16 bpp grayscale image (if the source is 48 or 64 bpp).

Sample usage:

// create filter
ExtractChannel filter = new ExtractChannel( RGB.G );
// apply the filter
Bitmap channelImage = filter.Apply( image );

Initial image:

Result image:

Inheritance: BaseFilter
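
The short sketch below (not part of the original samples) illustrates the bit-depth rule described above; image24bpp and image48bpp are assumed placeholder bitmaps of the corresponding formats.

// extract the red channel from sources of different bit depth
ExtractChannel extractRed = new ExtractChannel( RGB.R );

// 24/32 bpp source -> 8 bpp grayscale channel (PixelFormat.Format8bppIndexed)
Bitmap red8 = extractRed.Apply( image24bpp );

// 48/64 bpp source -> 16 bpp grayscale channel (PixelFormat.Format16bppGrayScale)
Bitmap red16 = extractRed.Apply( image48bpp );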
Example 1
        private Bitmap Flatten(Bitmap bmp, int fillint, int contint)
        {
            // color filter configured over the [0, fillint] range on each RGB channel
            ColorFiltering colorFilter = new ColorFiltering();

            colorFilter.Red = new IntRange(0, fillint);
            colorFilter.Green = new IntRange(0, fillint);
            colorFilter.Blue = new IntRange(0, fillint);
            colorFilter.FillOutsideRange = false;
            using (Bitmap filteredBmp = colorFilter.Apply(bmp))
            {
                AForge.Imaging.Filters.ContrastCorrection Contrast = new ContrastCorrection(contint);
                AForge.Imaging.Filters.Invert invert = new Invert();
                AForge.Imaging.Filters.ExtractChannel extract_channel = new ExtractChannel(RGB.B);
                AForge.Imaging.Filters.Threshold thresh_hold = new Threshold(44);

                // boost contrast, then take the blue channel, binarize it and invert it
                Contrast.ApplyInPlace(filteredBmp);

                Bitmap extractedBmp = extract_channel.Apply(filteredBmp);
                thresh_hold.ApplyInPlace(extractedBmp);
                invert.ApplyInPlace(extractedBmp);

                return extractedBmp;
            }
        }
Example 2
        public TestPanel()
        {
            InitializeComponent();
            selected = redFilter;

            for (int x = 0; x < 187; x++)
            {
                comboBox1.Items.Add(x);
            }

            brownFilter.Red = new IntRange(55,85);
            brownFilter.Green = new IntRange(45,75);
            brownFilter.Blue = new IntRange(45,75);

            comboBox2.Items.Add("Re");
            comboBox2.Items.Add("Gr");
            comboBox2.Items.Add("Bl");
        }
Example 3
        private void comboBox2_SelectedIndexChanged(object sender, EventArgs e)
        {
            // pick the channel filter matching the selected entry ("Re", "Gr", "Bl")
            switch (comboBox2.SelectedIndex)
            {
                case 0:
                    selected = redFilter;
                    break;
                case 1:
                    selected = greenFilter;
                    break;
                case 2:
                    selected = blueFilter;
                    break;
                default:
                    break;
            }

            // extract the chosen channel and binarize it with the threshold from textBox1
            Bitmap channel = selected.Apply((Bitmap)pictureBox1.Image);
            thresholdFilter = new Threshold(Int32.Parse(textBox1.Text));
            thresholdFilter.ApplyInPlace(channel);
            pictureBox2.Image = channel;
        }
Example 4
        public void Video_NewFrame(object sender, NewFrameEventArgs eventArgs)
        {
            UnmanagedImage image = UnmanagedImage.FromManagedImage((Bitmap) eventArgs.Frame.Clone());

            var extractChannel = new ExtractChannel(RGB.R);
            UnmanagedImage channel = extractChannel.Apply(image);
            //		UnmanagedImage originalRed = channel.Clone();
            if (true) {
                var threshold = new Threshold(200);
                threshold.ApplyInPlace(channel);

                ////filter to convert RGB image to 8bpp gray scale for image processing
                //IFilter gray_filter = new GrayscaleBT709();
                //gray_image = gray_filter.Apply(gray_image);

                ////thresholding a image
                //Threshold th_filter = new Threshold(color_data.threshold);
                //th_filter.ApplyInPlace(gray_image);

                //erosion filter to filter out small unwanted pixels
                Erosion3x3 erosion = new Erosion3x3();
                erosion.ApplyInPlace(channel);

                //dilation filter
                //Dilatation3x3 dilatation = new Dilatation3x3();
                //dilatation.ApplyInPlace(channel);

                //GrayscaleToRGB filter = new GrayscaleToRGB();
                //image = filter.Apply(channel);

                //ReplaceChannel replaceFilter = new ReplaceChannel(RGB.B, channel);
                //replaceFilter.ApplyInPlace(image);
            }

            BlobCounter bc = new BlobCounter();
            //arrange blobs by area
            bc.ObjectsOrder = ObjectsOrder.Area;
            bc.FilterBlobs = true;
            bc.MinHeight = minObjectSize;
            bc.MinWidth = minObjectSize;
            bc.MaxHeight = maxObjectSize;
            bc.MaxWidth = maxObjectSize;

            //process image for blobs
            bc.ProcessImage(channel);
            channel.Dispose();

            //	if (motionDetector.ProcessFrame(image) > 0.02) {
            //	for (int i = 0; i < blobCountingObjectsProcessing.ObjectRectangles.Length; i++) {
            Rectangle[] rectangles = bc.GetObjectsRectangles();
            Blob[] blobs = bc.GetObjectsInformation();
            for (int i = 0; i < bc.ObjectsCount; i++) {
                Rectangle rectangle = rectangles[i];
                int width = rectangle.Width;
                int height = rectangle.Height;

            //		if (width < maxObjectSize && height < maxObjectSize && width > minObjectSize && height > minObjectSize) {
                    Drawing.Rectangle(image, rectangle, colorList[i % colorList.Length]);

                    if (i == 0) {
                        Position = GetCenterOfMass(image, rectangle);
                        Drawing.FillRectangle(image, rectangle, Color.BlanchedAlmond);
                        Drawing.FillRectangle(image, new Rectangle((int) Position.U - dotSize, (int) Position.V - dotSize, dotSize * 3, dotSize * 3), Color.Indigo);
                    }
            //		}
            }
            //	}

            Image = image.ToManagedImage();
            //	videoForm.ImageDestination.Image = image.ToManagedImage();
        }
Example 5
        /// <summary>
        /// processes Frame for Motion Detection based on background generation
        /// </summary>
        /// <param name="frame">
        /// Takes in 2 Bitmap parameters, currentFrame and backgroundFrame
        /// </param>
        /// <returns>
        /// frame in which motion is marked
        /// </returns>
        public Bitmap processFrame(params Bitmap[] frame)
        {
            Bitmap currentFrame = frame[0];
            // create grayscale filter (BT709)
            Grayscale filter = new Grayscale(0.2125, 0.7154, 0.0721);
            Bitmap GScurrentFrame = filter.Apply(currentFrame);
            if (this.backgroundFrame == null)
            {
                this.backgroundFrame = (Bitmap)GScurrentFrame.Clone();
                GScurrentFrame.Dispose();
                return currentFrame;
            }
            else
            {
                Morph filterx = new Morph(GScurrentFrame);
                filterx.SourcePercent = 0.75;
                Bitmap tmp = filterx.Apply(backgroundFrame);
                // dispose old background
                backgroundFrame.Dispose();
                backgroundFrame = tmp;

                // create processing filters sequence
                FiltersSequence processingFilter = new FiltersSequence();
                processingFilter.Add(new Difference(backgroundFrame));
                processingFilter.Add(new Threshold(threshold_val));
                processingFilter.Add(new Opening());
                processingFilter.Add(new Edges());
                // apply the filter
                Bitmap tmp1 = processingFilter.Apply(GScurrentFrame);

                // extract the red channel from the original frame and merge it
                // with the detected motion borders
                IFilter extractChannel = new ExtractChannel(RGB.R);
                Bitmap redChannel = extractChannel.Apply(currentFrame);
                Merge mergeFilter = new Merge();
                mergeFilter.OverlayImage = tmp1;
                Bitmap merged = mergeFilter.Apply(redChannel);
                // put the highlighted channel back into the original frame
                ReplaceChannel rc = new ReplaceChannel(RGB.R, merged);
                Bitmap t3 = rc.Apply(currentFrame);
                merged.Dispose();
                redChannel.Dispose();
                tmp1.Dispose();
                GScurrentFrame.Dispose();
                return t3;
            }
        }
Example 6
        private void UpdateChannelPreviews(System.Drawing.Bitmap pic)
        {
            ExtractChannel r = new ExtractChannel(RGB.R);
            ExtractChannel g = new ExtractChannel(RGB.G);
            ExtractChannel b = new ExtractChannel(RGB.B);

            System.Drawing.Bitmap rImage = null;
            System.Drawing.Bitmap gImage = null;
            System.Drawing.Bitmap bImage = null;

            // apply the filter
            if(pic != null)
            {
                rImage = r.Apply(pic);
                gImage = g.Apply(pic);
                bImage = b.Apply(pic);
            }
            else
            {
                rImage = r.Apply(mainPhoto);
                gImage = g.Apply(mainPhoto);
                bImage = b.Apply(mainPhoto);
            }

            RedChannelbmp = ToBitmapImage(rImage);
            GreenChannelbmp = ToBitmapImage(gImage);
            BlueChannelbmp = ToBitmapImage(bImage);
        }
Example 7
        void webcam_ImageCaptured_Back(object source, WebcamEventArgs e)
        {
            _FrameImage.Image = e.WebCamImage;
            Bitmap MaskImage = new Bitmap(640, 480);
            if (backgroundFrame == null)
            {
                Frames2Ignore--;
                if (Frames2Ignore == 0)
                {
                    backgroundFrame = (Bitmap)e.WebCamImage;
                    backgroundFrameGray = grayscaleFilter.Apply(backgroundFrame);
                }
                return;
            }

            // save current image
            CurrentFrame = (Bitmap)e.WebCamImage;
            CurrentFrameGray = grayscaleFilter.Apply(CurrentFrame);

            /*
            // create filter
            IFilter pixellateFilter = new Pixellate();
            // apply the filter
            backgroundFrame = pixellateFilter.Apply(backgroundFrame);
            backgroundFrameGray = grayscaleFilter.Apply(backgroundFrame);
            CurrentFrame = pixellateFilter.Apply(CurrentFrame);
            CurrentFrameGray = grayscaleFilter.Apply(CurrentFrame);*/

            MoveTowards moveTowardsFilter = new MoveTowards();
            moveTowardsFilter.OverlayImage = CurrentFrameGray;
            // move background towards current frame
            Bitmap tmp = moveTowardsFilter.Apply(backgroundFrameGray);
            // dispose old background
            backgroundFrame.Dispose();
            backgroundFrame = tmp;

            // create processing filters sequence

            FiltersSequence processingFilter = new FiltersSequence();
            processingFilter.Add(new Difference(backgroundFrameGray));
            processingFilter.Add(new Threshold(15));
            processingFilter.Add(new Opening());
            processingFilter.Add(new Edges());
            processingFilter.Add(new DifferenceEdgeDetector());

            // apply the filter

            Bitmap tmp1 = processingFilter.Apply(CurrentFrameGray);
            // extract red channel from the original image

            IFilter extractChannel = new ExtractChannel(RGB.R);
            Bitmap redChannel = extractChannel.Apply(backgroundFrame);

            //  merge red channel with moving object borders

            Merge mergeFilter = new Merge();
            mergeFilter.OverlayImage = tmp1;
            Bitmap tmp2 = mergeFilter.Apply(redChannel);
            // replace red channel in the original image

            ReplaceChannel replaceChannel = new ReplaceChannel(RGB.R, tmp2);
            Bitmap tmp3 = replaceChannel.Apply(backgroundFrame);

            ConnectedComponentsLabeling CCL = new ConnectedComponentsLabeling();
            CCL.MinWidth = 75;
            CCL.MinHeight = 75;
            CCL.CoupledSizeFiltering = true;
            Bitmap tmp4 = CCL.Apply(tmp1);

            blobCounter.MinHeight = 75;
            blobCounter.MinWidth = 75;
            blobCounter.CoupledSizeFiltering = true;
            blobCounter.ProcessImage(tmp1);
            Blob[] blobs = blobCounter.GetObjects(tmp1);
            int maxSize = 0;
            Blob maxObject = new Blob(0, new Rectangle(0, 0, 0, 0));
            // find biggest blob
            Bitmap Masked = new Bitmap(320, 240);
            if (blobs != null)
            {
                foreach (Blob blob in blobs)
                {
                    int blobSize = blob.Rectangle.Width * blob.Rectangle.Height;

                    if (blobSize > maxSize)
                    {
                        maxSize = blobSize;
                        maxObject = blob;
                    }
                }

                for (int i = maxObject.Rectangle.Left; i < maxObject.Rectangle.Right; i++)
                {
                    for (int j = maxObject.Rectangle.Top; j < maxObject.Rectangle.Bottom; j++)
                    {
                        Masked.SetPixel(i, j, maxObject.Image.GetPixel(i - maxObject.Rectangle.Left, j - maxObject.Rectangle.Top));
                    }
                }
            }

            /*Bitmap Hor = new Bitmap(320, 240);
            Bitmap Ver = new Bitmap(320, 240);
            if (maxSize > 150)
            {
                AForge.Imaging.VerticalIntensityStatistics VIS = new VerticalIntensityStatistics(tmp1);
                int[] HistVer = VIS.Gray.Values;
                AForge.Imaging.HorizontalIntensityStatistics HIS = new HorizontalIntensityStatistics(tmp1);
                int[] HistHor = HIS.Gray.Values;

                for (int x=0;x<320;x++)
                    for (int y = 0; y < 240; y++)
                    {
                        Hor.SetPixel(x, y, Color.White);
                        Ver.SetPixel(x, y, Color.White);
                    }
                int Imax = -1, Max = -1;
                for (int i = 0; i < HistHor.Length; i++)
                {
                    for (int y = 0; y < ((double)(HistHor[i]) / 255) ; y++)
                        Hor.SetPixel(i, y, Color.Black);
                        if (HistHor[i] > 0)
                        {
                            Imax = i;
                            Max = HistHor[i];
                        }
                }
                int ImaxY = -1, MaxY = -1;
                for (int i = 0; i < HistVer.Length; i++)
                {
                    for (int x = 0; x < ((double)(HistVer[i]) / 255) ; x++)
                        Ver.SetPixel(x, i, Color.Black);
                    if (HistVer[i] > MaxY)
                    {
                        ImaxY = i;
                        MaxY = HistVer[i];
                    }
                }
            }

            */
               /* blobCounter.MinHeight = 75;
            blobCounter.MinWidth = 75;
            blobCounter.CoupledSizeFiltering = true;
            blobCounter.ProcessImage(tmp1);

            Blob[] blobs = blobCounter.GetObjects(tmp1);
            int maxSize = 0;
            Blob maxObject = new Blob(0, new Rectangle(0, 0, 0, 0));
            // find biggest blob
            if (blobs != null)
            {
                foreach (Blob blob in blobs)
                {
                    int blobSize = blob.Rectangle.Width * blob.Rectangle.Height;

                    if (blobSize > maxSize)
                    {
                        maxSize = blobSize;
                        maxObject = blob;
                    }
                }

                if (maxObject.Rectangle.Height > 90 && maxObject.Rectangle.Width > 30)
                {
                    AForge.Imaging.VerticalIntensityStatistics VIS = new VerticalIntensityStatistics(maxObject.Image);
                    int[] HistVer = VIS.Gray.Values;
                    AForge.Imaging.HorizontalIntensityStatistics HIS = new HorizontalIntensityStatistics(maxObject.Image);
                    int[] HistHor = HIS.Gray.Values;

                    int Imax = -1, Max = -1;
                    for (int i = 0; i < HistHor.Length; i++)
                    {
                        if (HistHor[i] > 0)
                        {
                            Imax = i;
                            Max = HistHor[i];
                            break;
                        }
                    }
                    int ImaxY = -1, MaxY = -1;
                    for (int i = 0; i < HistVer.Length; i++)
                    {
                        if (HistVer[i] > MaxY)
                        {
                            ImaxY = i;
                            MaxY = HistVer[i];
                        }
                    }
                    //Imax = 0;
                    ImaxY = 0;

                    Console.WriteLine("X={0},Y={1}", maxObject.Rectangle.X, maxObject.Rectangle.Y);
                    if (eChangedCursorEvent != null && maxSize != 0)
                        eChangedCursorEvent(maxObject.Rectangle.X + Imax, maxObject.Rectangle.Y + ImaxY);
                    LastX = maxObject.Rectangle.X;
                    LastY = maxObject.Rectangle.Y;
                }*/
                /*else if (LastX != -1 && LastY != -1 && maxSize > 0)
                {
                    //Calc distance from LastX,LastY
                    double distX = System.Math.Pow(maxObject.Rectangle.X - LastX, 2);
                    double distY = System.Math.Pow(maxObject.Rectangle.Y - LastY, 2);
                    double dist = System.Math.Pow(distX + distY, 0.5);
                    if (dist < 15)
                    {
                        Console.WriteLine("X={0},Y={1}", maxObject.Rectangle.X, maxObject.Rectangle.Y);
                        if (eChangedCursorEvent != null && maxSize != 0)
                            eChangedCursorEvent(maxObject.Rectangle.X, maxObject.Rectangle.Y);
                        LastX = maxObject.Rectangle.X;
                        LastY = maxObject.Rectangle.Y;
                    }
                    else
                    {
                        LastX = -1;
                        LastY = -1;
                    }
                }
                else
                {
                    LastX = -1;
                    LastY = -1;
                }*/
            //}
            _CaptureImage.Image = maxObject.Image;
            //_CaptureImage.Image = tmp3;
            _CaptureImage2.Image = tmp4;
            backgroundFrame = (Bitmap)e.WebCamImage;
            backgroundFrameGray = grayscaleFilter.Apply(backgroundFrame);
        }
Example 8
        void webcam_ImageCaptured(object source, WebcamEventArgs e)
        {
            _FrameImage.Image = e.WebCamImage;
            Bitmap MaskImage = new Bitmap(640, 480);
            if (backgroundFrame == null)
            {
                Frames2Ignore--;
                if (Frames2Ignore == 0)
                {
                    backgroundFrame = (Bitmap)e.WebCamImage;
                    backgroundFrameGray = grayscaleFilter.Apply(backgroundFrame);
                }
                return;
            }

            // save current image
            CurrentFrame = (Bitmap)e.WebCamImage;
            CurrentFrameGray = grayscaleFilter.Apply(CurrentFrame);

            /*
            // create filter
            IFilter pixellateFilter = new Pixellate();
            // apply the filter
            backgroundFrame = pixellateFilter.Apply(backgroundFrame);
            backgroundFrameGray = grayscaleFilter.Apply(backgroundFrame);
            CurrentFrame = pixellateFilter.Apply(CurrentFrame);
            CurrentFrameGray = grayscaleFilter.Apply(CurrentFrame);*/

            MoveTowards moveTowardsFilter = new MoveTowards();
            moveTowardsFilter.OverlayImage = CurrentFrameGray;
            // move background towards current frame
            Bitmap tmp = moveTowardsFilter.Apply(backgroundFrameGray);
            // dispose old background
            backgroundFrame.Dispose();
            backgroundFrame = tmp;

            // create processing filters sequence

            FiltersSequence processingFilter = new FiltersSequence();
            processingFilter.Add(new Difference(backgroundFrameGray));
            processingFilter.Add(new Threshold(15));
            processingFilter.Add(new Opening());
            processingFilter.Add(new Edges());
            processingFilter.Add(new DifferenceEdgeDetector());

            // apply the filter

            Bitmap tmp1 = processingFilter.Apply(CurrentFrameGray);
            // extract red channel from the original image

            IFilter extractChannel = new ExtractChannel(RGB.R);
            Bitmap redChannel = extractChannel.Apply(backgroundFrame);

            //  merge red channel with moving object borders

            Merge mergeFilter = new Merge();
            mergeFilter.OverlayImage = tmp1;
            Bitmap tmp2 = mergeFilter.Apply(redChannel);
            // replace red channel in the original image

            ReplaceChannel replaceChannel = new ReplaceChannel(RGB.R, tmp2);
            Bitmap tmp3 = replaceChannel.Apply(backgroundFrame);
            StateMgr.Execute(tmp1);
            if (eChangedCursorEvent != null && StateMgr.Val == Webcam.ValidLocation.TRUE)
            {
                //Console.WriteLine("X={0} , Y={1}", StateMgr.CurrState.CurrX, StateMgr.CurrState.CurrY);
                eChangedCursorEvent(StateMgr.CurrState.CurrX, StateMgr.CurrState.CurrY);
                for (int i = -4; i <= 4; i++)
                    for (int j = -4; j <= 4;j++ )
                        tmp3.SetPixel(StateMgr.CurrState.CurrX+i, StateMgr.CurrState.CurrY+j, Color.Blue);
                //eChangedCursorEvent(StateMgr.CurrState.CurrX, 100);
                //eChangedCursorEvent(100, StateMgr.CurrState.CurrY);
            }
            _CaptureImage.Image = tmp1;
            _CaptureImage2.Image = tmp3;
            backgroundFrame = (Bitmap)e.WebCamImage;
            backgroundFrameGray = grayscaleFilter.Apply(backgroundFrame);
        }
Example 9
        /// <summary>
        /// processes Frame for Motion Detection based on frame comparison
        /// </summary>
        /// <param name="frame">
        /// Takes in 2 Bitmap parameters, currentFrame and backgroundFrame
        /// </param>
        /// <returns>
        /// frame in which motion is marked
        /// </returns>
        public Bitmap processFrame(params Bitmap[] frame)
        {
            Bitmap currentFrame = frame[0];
            // create grayscale filter (BT709)
            Grayscale filter = new Grayscale(0.2125, 0.7154, 0.0721);
            Bitmap GScurrentFrame = filter.Apply(currentFrame);
            if (this.backgroundFrame == null)
            {
                this.backgroundFrame = (Bitmap)GScurrentFrame.Clone();
                GScurrentFrame.Dispose();
                return currentFrame;
            }
            else
            {
                Difference differenceFilter = new Difference();
                IFilter thresholdFilter = new Threshold(15);
                // set background frame as an overlay for the difference filter
                differenceFilter.OverlayImage = backgroundFrame;
                Bitmap tmp = thresholdFilter.Apply(differenceFilter.Apply(GScurrentFrame));
                // reduce noise
                IFilter erosionFilter = new Erosion();
                Bitmap tmp1 = erosionFilter.Apply(tmp);
                tmp.Dispose();
                // highlight motion: merge the detected areas into the green channel
                IFilter extractChannel = new ExtractChannel(RGB.G);
                Bitmap greenChannel = extractChannel.Apply(currentFrame);
                Merge mergeFilter = new Merge();
                mergeFilter.OverlayImage = tmp1;
                Bitmap merged = mergeFilter.Apply(greenChannel);
                ReplaceChannel rc = new ReplaceChannel(RGB.G, merged);
                Bitmap t3 = rc.Apply(currentFrame);
                this.backgroundFrame = (Bitmap)GScurrentFrame.Clone();
                merged.Dispose();
                greenChannel.Dispose();
                tmp1.Dispose();
                GScurrentFrame.Dispose();
                return t3;
            }
        }