Replace channel of RGB color space
Inheritance: IFilter, IInPlaceFilter
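For reference, a minimal usage sketch of the filter, assuming a 24bpp RGB source image and an 8bpp grayscale bitmap of the same size to use as the replacement channel (the file names and variable names below are illustrative only):

        // load the source image and the grayscale image used as the new channel
        // (illustrative file names; the channel image is assumed to be 8bpp grayscale)
        Bitmap image = (Bitmap)System.Drawing.Image.FromFile("input.png");
        Bitmap channelImage = (Bitmap)System.Drawing.Image.FromFile("channel.png");

        // create the filter, replacing the red channel with the grayscale image
        ReplaceChannel replaceFilter = new ReplaceChannel(RGB.R, channelImage);
        // apply the filter to the source image in place (IInPlaceFilter)
        replaceFilter.ApplyInPlace(image);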
Example #1
        Bitmap processAll(string filename)
        {
            Bitmap SampleImage = (Bitmap)System.Drawing.Image.FromFile(filename);
            Bitmap rChannel = processImageCenterline(filename);
            Bitmap bChannel = processImageContour(filename);

            // replace the red and green channels with the centerline image
            // and the blue channel with the contour image
            ReplaceChannel replaceRFilter = new ReplaceChannel(RGB.R, rChannel);
            ReplaceChannel replaceGFilter = new ReplaceChannel(RGB.G, rChannel);
            ReplaceChannel replaceBFilter = new ReplaceChannel(RGB.B, bChannel);

            replaceRFilter.ApplyInPlace(SampleImage);
            replaceGFilter.ApplyInPlace(SampleImage);
            replaceBFilter.ApplyInPlace(SampleImage);

            return SampleImage;
        }
Example #2
        // Process new frame
        public void ProcessFrame( ref Bitmap image )
        {
            if ( backgroundFrame == null )
            {
                // create initial background image
                backgroundFrame = processingFilter1.Apply( image );

                // get image dimension
                width	= image.Width;
                height	= image.Height;

                // just return for the first time
                return;
            }

            Bitmap tmpImage;

            // apply the first filter sequence
            tmpImage = processingFilter1.Apply( image );

            if ( ++counter == 2 )
            {
                counter = 0;

                // move background towards current frame
                moveTowardsFilter.OverlayImage = tmpImage;
                moveTowardsFilter.ApplyInPlace( backgroundFrame );
            }

            // set background frame as an overlay for the difference filter
            differenceFilter.OverlayImage = backgroundFrame;

            // lock temporary image to apply several filters
            bitmapData = tmpImage.LockBits( new Rectangle( 0, 0, width, height ),
                ImageLockMode.ReadWrite, PixelFormat.Format8bppIndexed );

            // apply difference filter
            differenceFilter.ApplyInPlace( bitmapData );
            // apply threshold filter
            thresholdFilter.ApplyInPlace( bitmapData );
            // apply dilatation filter
            Bitmap tmpImage2 = dilatationFilter.Apply( bitmapData );

            // unlock temporary image
            tmpImage.UnlockBits( bitmapData );
            tmpImage.Dispose( );

            // calculate amount of changed pixels
            pixelsChanged = ( calculateMotionLevel ) ?
                CalculateWhitePixels( tmpImage2 ) : 0;

            // find edges
            Bitmap tmpImage2b = edgesFilter.Apply( tmpImage2 );
            tmpImage2.Dispose( );

            // extract red channel from the original image
            Bitmap redChannel = extrachChannel.Apply( image );

            //  merge red channel with moving object borders
            mergeFilter.OverlayImage = tmpImage2b;
            Bitmap tmpImage3 = mergeFilter.Apply( redChannel );
            redChannel.Dispose( );
            tmpImage2b.Dispose( );

            // replace red channel in the original image
            // Updated from original example to support new AForge Libraries (Sumit)
            if (replaceChannel == null)
            { replaceChannel = new ReplaceChannel(RGB.R, tmpImage3); }
            else
            { replaceChannel.ChannelImage = tmpImage3; }

            Bitmap tmpImage4 = replaceChannel.Apply( image );
            tmpImage3.Dispose( );

            image.Dispose( );
            image = tmpImage4;
        }
Example #3
        /// <summary>
        /// Processes a frame for motion detection based on background generation.
        /// </summary>
        /// <param name="frame">
        /// Takes in 2 Bitmap parameters, currentFrame and backgroundFrame.
        /// </param>
        /// <returns>
        /// Frame in which motion is marked.
        /// </returns>
        public Bitmap processFrame(params Bitmap[] frame)
        {
            Bitmap currentFrame = frame[0];
            // create grayscale filter (BT709)
            Grayscale filter = new Grayscale(0.2125, 0.7154, 0.0721);
            Bitmap GScurrentFrame = filter.Apply(currentFrame);
            if (this.backgroundFrame == null)
            {
                this.backgroundFrame = (Bitmap)GScurrentFrame.Clone();
                GScurrentFrame.Dispose();
                return currentFrame;
            }
            else
            {
                Morph filterx = new Morph(GScurrentFrame);
                filterx.SourcePercent = 0.75;
                Bitmap tmp = filterx.Apply(backgroundFrame);
                // dispose old background
                backgroundFrame.Dispose();
                backgroundFrame = tmp;

                // create processing filters sequence
                FiltersSequence processingFilter = new FiltersSequence();
                processingFilter.Add(new Difference(backgroundFrame));
                processingFilter.Add(new Threshold(threshold_val));
                processingFilter.Add(new Opening());
                processingFilter.Add(new Edges());
                // apply the filter
                Bitmap tmp1 = processingFilter.Apply(GScurrentFrame);

                // extract the red channel of the current frame
                IFilter extractChannel = new ExtractChannel(RGB.R);
                Bitmap redChannel = extractChannel.Apply(currentFrame);
                // merge the detected motion edges into the red channel
                Merge mergeFilter = new Merge();
                mergeFilter.OverlayImage = tmp1;
                Bitmap merged = mergeFilter.Apply(redChannel);
                // replace the red channel of the original frame with the merged result
                ReplaceChannel rc = new ReplaceChannel(RGB.R, merged);
                Bitmap result = rc.Apply(currentFrame);
                merged.Dispose();
                redChannel.Dispose();
                tmp1.Dispose();
                GScurrentFrame.Dispose();
                return result;
            }
        }
Example #4
		// Process new frame
		public void ProcessFrame( ref Bitmap image )
		{
			if ( backgroundFrame == null )
			{
				// create initial background image
				backgroundFrame = grayscaleFilter.Apply( image );

				// get image dimension
				width	= image.Width;
				height	= image.Height;

				// just return for the first time
				return;
			}

			Bitmap tmpImage;

			// apply the grayscale filter
			tmpImage = grayscaleFilter.Apply( image );

			// set background frame as an overlay for the difference filter
			differenceFilter.OverlayImage = backgroundFrame;

            // apply difference filter
            Bitmap tmpImage2 = differenceFilter.Apply( tmpImage );

            // lock the temporary image and apply some filters on the locked data
            bitmapData = tmpImage2.LockBits( new Rectangle( 0, 0, width, height ),
                ImageLockMode.ReadWrite, PixelFormat.Format8bppIndexed );

            // threshold filter
            thresholdFilter.ApplyInPlace( bitmapData );
            // erosion filter
            Bitmap tmpImage3 = erosionFilter.Apply( bitmapData );

            // unlock temporary image
            tmpImage2.UnlockBits( bitmapData );
            tmpImage2.Dispose( );

            // calculate amount of changed pixels
			pixelsChanged = ( calculateMotionLevel ) ?
				CalculateWhitePixels( tmpImage3 ) : 0;

			// dispose old background
			backgroundFrame.Dispose( );
			// set background to the current frame
			backgroundFrame = tmpImage;

			// extract red channel from the original image
			Bitmap redChannel = extrachChannel.Apply( image );

			//  merge red channel with moving object
			mergeFilter.OverlayImage = tmpImage3;
			Bitmap tmpImage4 = mergeFilter.Apply( redChannel );
			redChannel.Dispose( );
			tmpImage3.Dispose( );

			// replace red channel in the original image
            if(replaceChannel == null)
                replaceChannel = new ReplaceChannel(RGB.R, tmpImage4);
            else
			    replaceChannel.ChannelImage = tmpImage4;
			Bitmap tmpImage5 = replaceChannel.Apply( image );
			tmpImage4.Dispose( );

			image.Dispose( );
			image = tmpImage5;
		}
Example #5
        void webcam_ImageCaptured_Back(object source, WebcamEventArgs e)
        {
            _FrameImage.Image = e.WebCamImage;
            Bitmap MaskImage = new Bitmap(640, 480);
            if (backgroundFrame == null)
            {
                Frames2Ignore--;
                if (Frames2Ignore == 0)
                {
                    backgroundFrame = (Bitmap)e.WebCamImage;
                    backgroundFrameGray = grayscaleFilter.Apply(backgroundFrame);
                }
                return;
            }

            // save the current image
            CurrentFrame = (Bitmap)e.WebCamImage;
            CurrentFrameGray = grayscaleFilter.Apply(CurrentFrame);

            /*
            // create filter
            IFilter pixellateFilter = new Pixellate();
            // apply the filter
            backgroundFrame = pixellateFilter.Apply(backgroundFrame);
            backgroundFrameGray = grayscaleFilter.Apply(backgroundFrame);
            CurrentFrame = pixellateFilter.Apply(CurrentFrame);
            CurrentFrameGray = grayscaleFilter.Apply(CurrentFrame);*/

            MoveTowards moveTowardsFilter = new MoveTowards();
            moveTowardsFilter.OverlayImage = CurrentFrameGray;
            // move background towards current frame
            Bitmap tmp = moveTowardsFilter.Apply(backgroundFrameGray);
            // dispose old background
            backgroundFrame.Dispose();
            backgroundFrame = tmp;

            // create processing filters sequence

            FiltersSequence processingFilter = new FiltersSequence();
            processingFilter.Add(new Difference(backgroundFrameGray));
            processingFilter.Add(new Threshold(15));
            processingFilter.Add(new Opening());
            processingFilter.Add(new Edges());
            processingFilter.Add(new DifferenceEdgeDetector());

            // apply the filter

            Bitmap tmp1 = processingFilter.Apply(CurrentFrameGray);
            // extract red channel from the original image

            IFilter extrachChannel = new ExtractChannel(RGB.R);
            Bitmap redChannel = extrachChannel.Apply(backgroundFrame);

            //  merge red channel with moving object borders

            Merge mergeFilter = new Merge();
            mergeFilter.OverlayImage = tmp1;
            Bitmap tmp2 = mergeFilter.Apply(redChannel);
            // replace red channel in the original image

            ReplaceChannel replaceChannel = new ReplaceChannel(RGB.R, tmp2);
            Bitmap tmp3 = replaceChannel.Apply(backgroundFrame);

            ConnectedComponentsLabeling CCL = new ConnectedComponentsLabeling();
            CCL.MinWidth = 75;
            CCL.MinHeight = 75;
            CCL.CoupledSizeFiltering = true;
            Bitmap tmp4 = CCL.Apply(tmp1);

            blobCounter.MinHeight = 75;
            blobCounter.MinWidth = 75;
            blobCounter.CoupledSizeFiltering = true;
            blobCounter.ProcessImage(tmp1);
            Blob[] blobs = blobCounter.GetObjects(tmp1);
            int maxSize = 0;
            Blob maxObject = new Blob(0, new Rectangle(0, 0, 0, 0));
            // find biggest blob
            Bitmap Masked = new Bitmap(320, 240);
            if (blobs != null)
            {
                foreach (Blob blob in blobs)
                {
                    int blobSize = blob.Rectangle.Width * blob.Rectangle.Height;

                    if (blobSize > maxSize)
                    {
                        maxSize = blobSize;
                        maxObject = blob;
                    }
                }

                for (int i = maxObject.Rectangle.Left; i < maxObject.Rectangle.Right; i++)
                {
                    for (int j = maxObject.Rectangle.Top; j < maxObject.Rectangle.Bottom; j++)
                    {
                        Masked.SetPixel(i, j, maxObject.Image.GetPixel(i - maxObject.Rectangle.Left, j - maxObject.Rectangle.Top));
                    }
                }
            }

            /*Bitmap Hor = new Bitmap(320, 240);
            Bitmap Ver = new Bitmap(320, 240);
            if (maxSize > 150)
            {
                AForge.Imaging.VerticalIntensityStatistics VIS = new VerticalIntensityStatistics(tmp1);
                int[] HistVer = VIS.Gray.Values;
                AForge.Imaging.HorizontalIntensityStatistics HIS = new HorizontalIntensityStatistics(tmp1);
                int[] HistHor = HIS.Gray.Values;

                for (int x=0;x<320;x++)
                    for (int y = 0; y < 240; y++)
                    {
                        Hor.SetPixel(x, y, Color.White);
                        Ver.SetPixel(x, y, Color.White);
                    }
                int Imax = -1, Max = -1;
                for (int i = 0; i < HistHor.Length; i++)
                {
                    for (int y = 0; y < ((double)(HistHor[i]) / 255) ; y++)
                        Hor.SetPixel(i, y, Color.Black);
                        if (HistHor[i] > 0)
                        {
                            Imax = i;
                            Max = HistHor[i];
                        }
                }
                int ImaxY = -1, MaxY = -1;
                for (int i = 0; i < HistVer.Length; i++)
                {
                    for (int x = 0; x < ((double)(HistVer[i]) / 255) ; x++)
                        Ver.SetPixel(x, i, Color.Black);
                    if (HistVer[i] > MaxY)
                    {
                        ImaxY = i;
                        MaxY = HistVer[i];
                    }
                }
            }

            */
               /* blobCounter.MinHeight = 75;
            blobCounter.MinWidth = 75;
            blobCounter.CoupledSizeFiltering = true;
            blobCounter.ProcessImage(tmp1);

            Blob[] blobs = blobCounter.GetObjects(tmp1);
            int maxSize = 0;
            Blob maxObject = new Blob(0, new Rectangle(0, 0, 0, 0));
            // find biggest blob
            if (blobs != null)
            {
                foreach (Blob blob in blobs)
                {
                    int blobSize = blob.Rectangle.Width * blob.Rectangle.Height;

                    if (blobSize > maxSize)
                    {
                        maxSize = blobSize;
                        maxObject = blob;
                    }
                }

                if (maxObject.Rectangle.Height > 90 && maxObject.Rectangle.Width > 30)
                {
                    AForge.Imaging.VerticalIntensityStatistics VIS = new VerticalIntensityStatistics(maxObject.Image);
                    int[] HistVer = VIS.Gray.Values;
                    AForge.Imaging.HorizontalIntensityStatistics HIS = new HorizontalIntensityStatistics(maxObject.Image);
                    int[] HistHor = HIS.Gray.Values;

                    int Imax = -1, Max = -1;
                    for (int i = 0; i < HistHor.Length; i++)
                    {
                        if (HistHor[i] > 0)
                        {
                            Imax = i;
                            Max = HistHor[i];
                            break;
                        }
                    }
                    int ImaxY = -1, MaxY = -1;
                    for (int i = 0; i < HistVer.Length; i++)
                    {
                        if (HistVer[i] > MaxY)
                        {
                            ImaxY = i;
                            MaxY = HistVer[i];
                        }
                    }
                    //Imax = 0;
                    ImaxY = 0;

                    Console.WriteLine("X={0},Y={1}", maxObject.Rectangle.X, maxObject.Rectangle.Y);
                    if (eChangedCursorEvent != null && maxSize != 0)
                        eChangedCursorEvent(maxObject.Rectangle.X + Imax, maxObject.Rectangle.Y + ImaxY);
                    LastX = maxObject.Rectangle.X;
                    LastY = maxObject.Rectangle.Y;
                }*/
                /*else if (LastX != -1 && LastY != -1 && maxSize > 0)
                {
                    //Calc distance from LastX,LastY
                    double distX = System.Math.Pow(maxObject.Rectangle.X - LastX, 2);
                    double distY = System.Math.Pow(maxObject.Rectangle.Y - LastY, 2);
                    double dist = System.Math.Pow(distX + distY, 0.5);
                    if (dist < 15)
                    {
                        Console.WriteLine("X={0},Y={1}", maxObject.Rectangle.X, maxObject.Rectangle.Y);
                        if (eChangedCursorEvent != null && maxSize != 0)
                            eChangedCursorEvent(maxObject.Rectangle.X, maxObject.Rectangle.Y);
                        LastX = maxObject.Rectangle.X;
                        LastY = maxObject.Rectangle.Y;
                    }
                    else
                    {
                        LastX = -1;
                        LastY = -1;
                    }
                }
                else
                {
                    LastX = -1;
                    LastY = -1;
                }*/
            //}
            _CaptureImage.Image = maxObject.Image;
            //_CaptureImage.Image = tmp3;
            _CaptureImage2.Image = tmp4;
            backgroundFrame = (Bitmap)e.WebCamImage;
            backgroundFrameGray = grayscaleFilter.Apply(backgroundFrame);
        }
Example #6
        void webcam_ImageCaptured(object source, WebcamEventArgs e)
        {
            _FrameImage.Image = e.WebCamImage;
            Bitmap MaskImage = new Bitmap(640, 480);
            if (backgroundFrame == null)
            {
                Frames2Ignore--;
                if (Frames2Ignore == 0)
                {
                    backgroundFrame = (Bitmap)e.WebCamImage;
                    backgroundFrameGray = grayscaleFilter.Apply(backgroundFrame);
                }
                return;
            }

            // save the current image
            CurrentFrame = (Bitmap)e.WebCamImage;
            CurrentFrameGray = grayscaleFilter.Apply(CurrentFrame);

            /*
            // create filter
            IFilter pixellateFilter = new Pixellate();
            // apply the filter
            backgroundFrame = pixellateFilter.Apply(backgroundFrame);
            backgroundFrameGray = grayscaleFilter.Apply(backgroundFrame);
            CurrentFrame = pixellateFilter.Apply(CurrentFrame);
            CurrentFrameGray = grayscaleFilter.Apply(CurrentFrame);*/

            MoveTowards moveTowardsFilter = new MoveTowards();
            moveTowardsFilter.OverlayImage = CurrentFrameGray;
            // move background towards current frame
            Bitmap tmp = moveTowardsFilter.Apply(backgroundFrameGray);
            // dispose old background
            backgroundFrame.Dispose();
            backgroundFrame = tmp;

            // create processing filters sequence

            FiltersSequence processingFilter = new FiltersSequence();
            processingFilter.Add(new Difference(backgroundFrameGray));
            processingFilter.Add(new Threshold(15));
            processingFilter.Add(new Opening());
            processingFilter.Add(new Edges());
            processingFilter.Add(new DifferenceEdgeDetector());

            // apply the filter

            Bitmap tmp1 = processingFilter.Apply(CurrentFrameGray);
            // extract red channel from the original image

            IFilter extrachChannel = new ExtractChannel(RGB.R);
            Bitmap redChannel = extrachChannel.Apply(backgroundFrame);

            //  merge red channel with moving object borders

            Merge mergeFilter = new Merge();
            mergeFilter.OverlayImage = tmp1;
            Bitmap tmp2 = mergeFilter.Apply(redChannel);
            // replace red channel in the original image

            ReplaceChannel replaceChannel = new ReplaceChannel(RGB.R, tmp2);
            Bitmap tmp3 = replaceChannel.Apply(backgroundFrame);
            StateMgr.Execute(tmp1);
            if (eChangedCursorEvent != null && StateMgr.Val == Webcam.ValidLocation.TRUE)
            {
                //Console.WriteLine("X={0} , Y={1}", StateMgr.CurrState.CurrX, StateMgr.CurrState.CurrY);
                eChangedCursorEvent(StateMgr.CurrState.CurrX, StateMgr.CurrState.CurrY);
                for (int i = -4; i <= 4; i++)
                    for (int j = -4; j <= 4;j++ )
                        tmp3.SetPixel(StateMgr.CurrState.CurrX+i, StateMgr.CurrState.CurrY+j, Color.Blue);
                //eChangedCursorEvent(StateMgr.CurrState.CurrX, 100);
                //eChangedCursorEvent(100, StateMgr.CurrState.CurrY);
            }
            _CaptureImage.Image = tmp1;
            _CaptureImage2.Image = tmp3;
            backgroundFrame = (Bitmap)e.WebCamImage;
            backgroundFrameGray = grayscaleFilter.Apply(backgroundFrame);
        }
 /// <summary>
 /// Processes a frame for motion detection based on frame comparison.
 /// </summary>
 /// <param name="frame">
 /// Takes in 2 Bitmap parameters, currentFrame and backgroundFrame.
 /// </param>
 /// <returns>
 /// Frame in which motion is marked.
 /// </returns>
 public Bitmap processFrame(params Bitmap[] frame)
 {
     Bitmap currentFrame = frame[0];
     // create grayscale filter (BT709)
     Grayscale filter = new Grayscale(0.2125, 0.7154, 0.0721);
     Bitmap GScurrentFrame = filter.Apply(currentFrame);
     if (this.backgroundFrame == null)
     {
         this.backgroundFrame = (Bitmap)GScurrentFrame.Clone();
         GScurrentFrame.Dispose();
         return currentFrame;
     }
     else
     {
         Difference differenceFilter = new Difference();
         IFilter thresholdFilter = new Threshold(15);
         // set background frame as an overlay for the difference filter
         differenceFilter.OverlayImage = backgroundFrame;
         Bitmap tmp = thresholdFilter.Apply(differenceFilter.Apply(GScurrentFrame));
         //reduce noise
         IFilter erosionFilter = new Erosion();
         Bitmap tmp1 = erosionFilter.Apply(tmp);
         tmp.Dispose();
         // highlight motion by merging the detected changes into the green channel
         IFilter extractChannel = new ExtractChannel(RGB.G);
         Bitmap greenChannel = extractChannel.Apply(currentFrame);
         Merge mergeFilter = new Merge();
         mergeFilter.OverlayImage = tmp1;
         Bitmap merged = mergeFilter.Apply(greenChannel);
         // put the merged channel back into the original frame
         ReplaceChannel rc = new ReplaceChannel(RGB.G, merged);
         Bitmap result = rc.Apply(currentFrame);
         // keep the current grayscale frame as the new background
         this.backgroundFrame.Dispose();
         this.backgroundFrame = (Bitmap)GScurrentFrame.Clone();
         merged.Dispose();
         greenChannel.Dispose();
         tmp1.Dispose();
         GScurrentFrame.Dispose();
         return result;
     }
 }