Bitmap processImageContourCenterLine(string filename)
 {
     Merge filter = new Merge(processImageCenterline(filename));
     Bitmap resultImage = filter.Apply(processImageContour(filename));
     Invert filterInvert = new Invert();
     filterInvert.ApplyInPlace(resultImage);
     return resultImage;
 }
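A minimal usage sketch for the method above, assuming the surrounding class (called ImageProcessor here, a hypothetical name) also exposes the processImageCenterline and processImageContour helpers that this method calls, and that sample.png is a valid image on disk:

using System.Drawing;

ImageProcessor processor = new ImageProcessor();           // hypothetical containing class
Bitmap combined = processor.processImageContourCenterLine("sample.png");
combined.Save("contour_centerline_inverted.png");          // inverted merge of contour and centerline images
combined.Dispose();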
Example #2
        /// <summary>
        /// Processes a frame for motion detection based on background generation.
        /// </summary>
        /// <param name="frame">
        /// Bitmap parameters; only frame[0] (the current frame) is used, the
        /// background frame is maintained internally.
        /// </param>
        /// <returns>
        /// Frame in which motion is marked.
        /// </returns>
        public Bitmap processFrame(params Bitmap[] frame)
        {
            Bitmap currentFrame = frame[0];
            // create grayscale filter (BT709)
            Grayscale filter = new Grayscale(0.2125, 0.7154, 0.0721);
            Bitmap GScurrentFrame = filter.Apply(currentFrame);
            if (this.backgroundFrame == null)
            {
                this.backgroundFrame = (Bitmap)GScurrentFrame.Clone();
                GScurrentFrame.Dispose();
                return currentFrame;
            }
            else
            {
                // update background: keep 75% of the old background, blend in 25% of the current frame
                Morph filterx = new Morph(GScurrentFrame);
                filterx.SourcePercent = 0.75;
                Bitmap tmp = filterx.Apply(backgroundFrame);
                // dispose old background
                backgroundFrame.Dispose();
                backgroundFrame = tmp;

                // create processing filters sequence
                FiltersSequence processingFilter = new FiltersSequence();
                processingFilter.Add(new Difference(backgroundFrame));
                processingFilter.Add(new Threshold(threshold_val));
                processingFilter.Add(new Opening());
                processingFilter.Add(new Edges());
                // apply the filter
                Bitmap tmp1 = processingFilter.Apply(GScurrentFrame);

                // highlight motion: merge the red channel with the motion edges and put it back
                IFilter extractChannel = new ExtractChannel(RGB.R);
                Bitmap redChannel = extractChannel.Apply(currentFrame);
                Merge mergeFilter = new Merge();
                mergeFilter.OverlayImage = tmp1;
                Bitmap merged = mergeFilter.Apply(redChannel);
                ReplaceChannel rc = new ReplaceChannel(RGB.R, merged);
                Bitmap t3 = rc.Apply(currentFrame);
                redChannel.Dispose();
                merged.Dispose();
                tmp1.Dispose();
                GScurrentFrame.Dispose();
                return t3;
            }
        }
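The core of this example is the background-subtraction chain built with AForge.NET's FiltersSequence. The sketch below isolates that chain, assuming background and current are same-sized 8 bpp grayscale Bitmaps and thresholdVal is the binarization threshold (the helper name MarkMotionEdges and its parameters are illustrative, not part of the original class):

using System.Drawing;
using AForge.Imaging.Filters;

static Bitmap MarkMotionEdges(Bitmap background, Bitmap current, int thresholdVal)
{
    FiltersSequence processingFilter = new FiltersSequence();
    processingFilter.Add(new Difference(background));   // per-pixel |current - background|
    processingFilter.Add(new Threshold(thresholdVal));  // keep only strong changes
    processingFilter.Add(new Opening());                // erosion + dilation to drop small noise specks
    processingFilter.Add(new Edges());                  // keep the outlines of the moving regions
    return processingFilter.Apply(current);
}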
Example #3
        void webcam_ImageCaptured(object source, WebcamEventArgs e)
        {
            _FrameImage.Image = e.WebCamImage;
            Bitmap MaskImage = new Bitmap(640, 480);
            if (backgroundFrame == null)
            {
                Frames2Ignore--;
                if (Frames2Ignore == 0)
                {
                    backgroundFrame = (Bitmap)e.WebCamImage;
                    backgroundFrameGray = grayscaleFilter.Apply(backgroundFrame);
                }
                return;
            }

            // Save current image
            CurrentFrame = (Bitmap)e.WebCamImage;
            CurrentFrameGray = grayscaleFilter.Apply(CurrentFrame);

            /*
            // create filter
            IFilter pixellateFilter = new Pixellate();
            // apply the filter
            backgroundFrame = pixellateFilter.Apply(backgroundFrame);
            backgroundFrameGray = grayscaleFilter.Apply(backgroundFrame);
            CurrentFrame = pixellateFilter.Apply(CurrentFrame);
            CurrentFrameGray = grayscaleFilter.Apply(CurrentFrame);*/

            MoveTowards moveTowardsFilter = new MoveTowards();
            moveTowardsFilter.OverlayImage = CurrentFrameGray;
            // move background towards current frame
            Bitmap tmp = moveTowardsFilter.Apply(backgroundFrameGray);
            // dispose old grayscale background and keep the adapted one
            backgroundFrameGray.Dispose();
            backgroundFrameGray = tmp;

            // create processing filters sequence

            FiltersSequence processingFilter = new FiltersSequence();
            processingFilter.Add(new Difference(backgroundFrameGray));
            processingFilter.Add(new Threshold(15));
            processingFilter.Add(new Opening());
            processingFilter.Add(new Edges());
            processingFilter.Add(new DifferenceEdgeDetector());

            // apply the filter

            Bitmap tmp1 = processingFilter.Apply(CurrentFrameGray);
            // extract red channel from the original image

            IFilter extractChannel = new ExtractChannel(RGB.R);
            Bitmap redChannel = extractChannel.Apply(backgroundFrame);

            //  merge red channel with moving object borders

            Merge mergeFilter = new Merge();
            mergeFilter.OverlayImage = tmp1;
            Bitmap tmp2 = mergeFilter.Apply(redChannel);
            // replace red channel in the original image

            ReplaceChannel replaceChannel = new ReplaceChannel(RGB.R, tmp2);
            Bitmap tmp3 = replaceChannel.Apply(backgroundFrame);
            StateMgr.Execute(tmp1);
            if (eChangedCursorEvent != null && StateMgr.Val == Webcam.ValidLocation.TRUE)
            {
                //Console.WriteLine("X={0} , Y={1}", StateMgr.CurrState.CurrX, StateMgr.CurrState.CurrY);
                eChangedCursorEvent(StateMgr.CurrState.CurrX, StateMgr.CurrState.CurrY);
                for (int i = -4; i <= 4; i++)
                    for (int j = -4; j <= 4; j++)
                        tmp3.SetPixel(StateMgr.CurrState.CurrX+i, StateMgr.CurrState.CurrY+j, Color.Blue);
                //eChangedCursorEvent(StateMgr.CurrState.CurrX, 100);
                //eChangedCursorEvent(100, StateMgr.CurrState.CurrY);
            }
            _CaptureImage.Image = tmp1;
            _CaptureImage2.Image = tmp3;
            backgroundFrame = (Bitmap)e.WebCamImage;
            backgroundFrameGray = grayscaleFilter.Apply(backgroundFrame);
        }
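Unlike processFrame above, this handler adapts the stored background with AForge.NET's MoveTowards filter rather than Morph. The fragment below isolates that adaptation step, assuming backgroundGray and currentGray are same-sized grayscale Bitmaps (a simplified sketch; the handler itself also swaps the background back to the raw webcam image at the end):

using System.Drawing;
using AForge.Imaging.Filters;

// Nudge the stored background slightly towards the current frame on every tick,
// so gradual lighting changes are absorbed while fast motion still stands out.
static Bitmap AdaptBackground(Bitmap backgroundGray, Bitmap currentGray)
{
    MoveTowards moveTowardsFilter = new MoveTowards();
    moveTowardsFilter.OverlayImage = currentGray;    // target image to move towards
    moveTowardsFilter.StepSize = 1;                  // maximum per-pixel change per call (assumed value)
    return moveTowardsFilter.Apply(backgroundGray);  // caller disposes the old background and keeps this one
}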
Example #4
        void webcam_ImageCaptured_Back(object source, WebcamEventArgs e)
        {
            _FrameImage.Image = e.WebCamImage;
            Bitmap MaskImage = new Bitmap(640, 480);
            if (backgroundFrame == null)
            {
                Frames2Ignore--;
                if (Frames2Ignore == 0)
                {
                    backgroundFrame = (Bitmap)e.WebCamImage;
                    backgroundFrameGray = grayscaleFilter.Apply(backgroundFrame);
                }
                return;
            }

            // Save current image
            CurrentFrame = (Bitmap)e.WebCamImage;
            CurrentFrameGray = grayscaleFilter.Apply(CurrentFrame);

            /*
            // create filter
            IFilter pixellateFilter = new Pixellate();
            // apply the filter
            backgroundFrame = pixellateFilter.Apply(backgroundFrame);
            backgroundFrameGray = grayscaleFilter.Apply(backgroundFrame);
            CurrentFrame = pixellateFilter.Apply(CurrentFrame);
            CurrentFrameGray = grayscaleFilter.Apply(CurrentFrame);*/

            MoveTowards moveTowardsFilter = new MoveTowards();
            moveTowardsFilter.OverlayImage = CurrentFrameGray;
            // move background towards current frame
            Bitmap tmp = moveTowardsFilter.Apply(backgroundFrameGray);
            // dispose old grayscale background and keep the adapted one
            backgroundFrameGray.Dispose();
            backgroundFrameGray = tmp;

            // create processing filters sequence

            FiltersSequence processingFilter = new FiltersSequence();
            processingFilter.Add(new Difference(backgroundFrameGray));
            processingFilter.Add(new Threshold(15));
            processingFilter.Add(new Opening());
            processingFilter.Add(new Edges());
            processingFilter.Add(new DifferenceEdgeDetector());

            // apply the filter

            Bitmap tmp1 = processingFilter.Apply(CurrentFrameGray);
            // extract red channel from the original image

            IFilter extractChannel = new ExtractChannel(RGB.R);
            Bitmap redChannel = extractChannel.Apply(backgroundFrame);

            //  merge red channel with moving object borders

            Merge mergeFilter = new Merge();
            mergeFilter.OverlayImage = tmp1;
            Bitmap tmp2 = mergeFilter.Apply(redChannel);
            // replace red channel in the original image

            ReplaceChannel replaceChannel = new ReplaceChannel(RGB.R, tmp2);
            Bitmap tmp3 = replaceChannel.Apply(backgroundFrame);

            ConnectedComponentsLabeling CCL = new ConnectedComponentsLabeling();
            CCL.MinWidth = 75;
            CCL.MinHeight = 75;
            CCL.CoupledSizeFiltering = true;
            Bitmap tmp4 = CCL.Apply(tmp1);

            blobCounter.MinHeight = 75;
            blobCounter.MinWidth = 75;
            blobCounter.CoupledSizeFiltering = true;
            blobCounter.ProcessImage(tmp1);
            Blob[] blobs = blobCounter.GetObjects(tmp1);
            int maxSize = 0;
            Blob maxObject = new Blob(0, new Rectangle(0, 0, 0, 0));
            // find biggest blob
            Bitmap Masked = new Bitmap(320, 240);
            if (blobs != null)
            {
                foreach (Blob blob in blobs)
                {
                    int blobSize = blob.Rectangle.Width * blob.Rectangle.Height;

                    if (blobSize > maxSize)
                    {
                        maxSize = blobSize;
                        maxObject = blob;
                    }
                }

                for (int i = maxObject.Rectangle.Left; i < maxObject.Rectangle.Right; i++)
                {
                    for (int j = maxObject.Rectangle.Top; j < maxObject.Rectangle.Bottom; j++)
                    {
                        Masked.SetPixel(i, j, maxObject.Image.GetPixel(i - maxObject.Rectangle.Left, j - maxObject.Rectangle.Top));
                    }
                }
            }

            /*Bitmap Hor = new Bitmap(320, 240);
            Bitmap Ver = new Bitmap(320, 240);
            if (maxSize > 150)
            {
                AForge.Imaging.VerticalIntensityStatistics VIS = new VerticalIntensityStatistics(tmp1);
                int[] HistVer = VIS.Gray.Values;
                AForge.Imaging.HorizontalIntensityStatistics HIS = new HorizontalIntensityStatistics(tmp1);
                int[] HistHor = HIS.Gray.Values;

                for (int x=0;x<320;x++)
                    for (int y = 0; y < 240; y++)
                    {
                        Hor.SetPixel(x, y, Color.White);
                        Ver.SetPixel(x, y, Color.White);
                    }
                int Imax = -1, Max = -1;
                for (int i = 0; i < HistHor.Length; i++)
                {
                    for (int y = 0; y < ((double)(HistHor[i]) / 255) ; y++)
                        Hor.SetPixel(i, y, Color.Black);
                        if (HistHor[i] > 0)
                        {
                            Imax = i;
                            Max = HistHor[i];
                        }
                }
                int ImaxY = -1, MaxY = -1;
                for (int i = 0; i < HistVer.Length; i++)
                {
                    for (int x = 0; x < ((double)(HistVer[i]) / 255) ; x++)
                        Ver.SetPixel(x, i, Color.Black);
                    if (HistVer[i] > MaxY)
                    {
                        ImaxY = i;
                        MaxY = HistVer[i];
                    }
                }
            }

            */
               /* blobCounter.MinHeight = 75;
            blobCounter.MinWidth = 75;
            blobCounter.CoupledSizeFiltering = true;
            blobCounter.ProcessImage(tmp1);

            Blob[] blobs = blobCounter.GetObjects(tmp1);
            int maxSize = 0;
            Blob maxObject = new Blob(0, new Rectangle(0, 0, 0, 0));
            // find biggest blob
            if (blobs != null)
            {
                foreach (Blob blob in blobs)
                {
                    int blobSize = blob.Rectangle.Width * blob.Rectangle.Height;

                    if (blobSize > maxSize)
                    {
                        maxSize = blobSize;
                        maxObject = blob;
                    }
                }

                if (maxObject.Rectangle.Height > 90 && maxObject.Rectangle.Width > 30)
                {
                    AForge.Imaging.VerticalIntensityStatistics VIS = new VerticalIntensityStatistics(maxObject.Image);
                    int[] HistVer = VIS.Gray.Values;
                    AForge.Imaging.HorizontalIntensityStatistics HIS = new HorizontalIntensityStatistics(maxObject.Image);
                    int[] HistHor = HIS.Gray.Values;

                    int Imax = -1, Max = -1;
                    for (int i = 0; i < HistHor.Length; i++)
                    {
                        if (HistHor[i] > 0)
                        {
                            Imax = i;
                            Max = HistHor[i];
                            break;
                        }
                    }
                    int ImaxY = -1, MaxY = -1;
                    for (int i = 0; i < HistVer.Length; i++)
                    {
                        if (HistVer[i] > MaxY)
                        {
                            ImaxY = i;
                            MaxY = HistVer[i];
                        }
                    }
                    //Imax = 0;
                    ImaxY = 0;

                    Console.WriteLine("X={0},Y={1}", maxObject.Rectangle.X, maxObject.Rectangle.Y);
                    if (eChangedCursorEvent != null && maxSize != 0)
                        eChangedCursorEvent(maxObject.Rectangle.X + Imax, maxObject.Rectangle.Y + ImaxY);
                    LastX = maxObject.Rectangle.X;
                    LastY = maxObject.Rectangle.Y;
                }*/
                /*else if (LastX != -1 && LastY != -1 && maxSize > 0)
                {
                    //Calc distance from LastX,LastY
                    double distX = System.Math.Pow(maxObject.Rectangle.X - LastX, 2);
                    double distY = System.Math.Pow(maxObject.Rectangle.Y - LastY, 2);
                    double dist = System.Math.Pow(distX + distY, 0.5);
                    if (dist < 15)
                    {
                        Console.WriteLine("X={0},Y={1}", maxObject.Rectangle.X, maxObject.Rectangle.Y);
                        if (eChangedCursorEvent != null && maxSize != 0)
                            eChangedCursorEvent(maxObject.Rectangle.X, maxObject.Rectangle.Y);
                        LastX = maxObject.Rectangle.X;
                        LastY = maxObject.Rectangle.Y;
                    }
                    else
                    {
                        LastX = -1;
                        LastY = -1;
                    }
                }
                else
                {
                    LastX = -1;
                    LastY = -1;
                }*/
            //}
            _CaptureImage.Image = maxObject.Image;
            //_CaptureImage.Image = tmp3;
            _CaptureImage2.Image = tmp4;
            backgroundFrame = (Bitmap)e.WebCamImage;
            backgroundFrameGray = grayscaleFilter.Apply(backgroundFrame);
        }
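This handler uses AForge.NET's BlobCounter to keep only the largest detected region. The condensed sketch below shows just that selection step, assuming the AForge 2.x BlobCounter API and that motionEdges is the binary motion image (tmp1 above); GetObjectsInformation returns blob metadata without extracting blob images, which is enough for locating the largest rectangle:

using System.Drawing;
using AForge.Imaging;

static Rectangle? LargestMotionRegion(Bitmap motionEdges)
{
    BlobCounter blobCounter = new BlobCounter();
    blobCounter.MinWidth = 75;                    // same size limits as the handler above
    blobCounter.MinHeight = 75;
    blobCounter.CoupledSizeFiltering = true;      // width and height limits must both hold
    blobCounter.ProcessImage(motionEdges);

    Rectangle? best = null;
    int maxSize = 0;
    foreach (Blob blob in blobCounter.GetObjectsInformation())
    {
        int size = blob.Rectangle.Width * blob.Rectangle.Height;
        if (size > maxSize)
        {
            maxSize = size;
            best = blob.Rectangle;
        }
    }
    return best;
}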
 /// <summary>
 /// Processes a frame for motion detection based on frame comparison.
 /// </summary>
 /// <param name="frame">
 /// Bitmap parameters; only frame[0] (the current frame) is used, the
 /// previous frame is kept internally as the comparison background.
 /// </param>
 /// <returns>
 /// Frame in which motion is marked.
 /// </returns>
 public Bitmap processFrame(params Bitmap[] frame)
 {
     Bitmap currentFrame = frame[0];
     // create grayscale filter (BT709)
     Grayscale filter = new Grayscale(0.2125, 0.7154, 0.0721);
     Bitmap GScurrentFrame = filter.Apply(currentFrame);
     if (this.backgroundFrame == null)
     {
         this.backgroundFrame = (Bitmap)GScurrentFrame.Clone();
         GScurrentFrame.Dispose();
         return currentFrame;
     }
     else
     {
         Difference differenceFilter = new Difference();
         IFilter thresholdFilter = new Threshold(15);
          // set background frame as an overlay for the difference filter
         differenceFilter.OverlayImage = backgroundFrame;
         Bitmap tmp = thresholdFilter.Apply(differenceFilter.Apply(GScurrentFrame));
          // reduce noise
         IFilter erosionFilter = new Erosion();
         Bitmap tmp1 = erosionFilter.Apply(tmp);
         tmp.Dispose();
          // highlight motion: merge the green channel with the motion mask and put it back
          IFilter extractChannel = new ExtractChannel(RGB.G);
          Bitmap greenChannel = extractChannel.Apply(currentFrame);
          Merge mergeFilter = new Merge();
          mergeFilter.OverlayImage = tmp1;
          Bitmap merged = mergeFilter.Apply(greenChannel);
          ReplaceChannel rc = new ReplaceChannel(RGB.G, merged);
          Bitmap t3 = rc.Apply(currentFrame);
          // replace the stored background with the current grayscale frame
          this.backgroundFrame.Dispose();
          this.backgroundFrame = (Bitmap)GScurrentFrame.Clone();
          greenChannel.Dispose();
          merged.Dispose();
          tmp1.Dispose();
          GScurrentFrame.Dispose();
          return t3;
     }
 }
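A hedged usage sketch for the frame-comparison detector above: frames are fed one at a time, the first call only seeds the background and returns the frame unchanged, and later calls return frames with motion marked in the green channel. MotionDetector is a hypothetical name for the containing class, and the frames list is assumed to already hold same-sized Bitmaps:

using System.Collections.Generic;
using System.Drawing;

static void MarkMotionInSequence(IList<Bitmap> frames)
{
    MotionDetector detector = new MotionDetector();    // hypothetical containing class
    for (int i = 0; i < frames.Count; i++)
    {
        Bitmap marked = detector.processFrame(frames[i]);
        marked.Save("motion_" + i + ".png");            // the first frame comes back unmarked
    }
}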