/// <summary>
        /// Process new video frame.
        /// </summary>
        ///
        /// <param name="videoFrame">Video frame to process (detect motion in).</param>
        ///
        /// <remarks><para>Processes new frame from video source and detects motion in it.
        /// The very first frame is stored as the reference (background) image; every later
        /// frame is compared to it pixel by pixel, averaging the absolute difference over
        /// all colour channels and thresholding the result into a binary motion map.</para>
        ///
        /// <para>Check <see cref="MotionLevel"/> property to get information about amount of motion
        /// (changes) in the processed frame.</para>
        /// </remarks>
        ///
        public unsafe void ProcessFrame(UnmanagedImage videoFrame)
        {
            lock ( _sync )
            {
                // first frame ever seen: remember it as the reference (background) frame
                if (_backgroundFrame == null)
                {
                    // save image dimension
                    _width  = videoFrame.Width;
                    _height = videoFrame.Height;

                    // allocate memory for background frame, keeping the source pixel format
                    _backgroundFrame = UnmanagedImage.Create(_width, _height, videoFrame.PixelFormat);

                    // NOTE(review): the original comment said "convert source frame to grayscale",
                    // but this is a plain copy — the background keeps the source pixel format
                    videoFrame.Copy(_backgroundFrame);

                    return;
                }

                // ignore frames whose dimensions differ from the background
                if ((videoFrame.Width != _width) || (videoFrame.Height != _height))
                {
                    return;
                }

                // lazily create the 8bpp motion map (and the temp buffer for noise suppression)
                if (_motionFrame == null)
                {
                    _motionFrame = UnmanagedImage.Create(_width, _height, PixelFormat.Format8bppIndexed);
                    _motionSize  = _motionFrame.Stride * _height;

                    // temporary buffer used by the erosion/dilatation filters
                    if (_suppressNoise)
                    {
                        _tempFrame = UnmanagedImage.Create(_width, _height, PixelFormat.Format8bppIndexed);
                    }
                }


                // pointers to background and current frames

                var   backFrame     = (byte *)_backgroundFrame.ImageData.ToPointer( );
                var   currFrame     = (byte *)videoFrame.ImageData.ToPointer( );
                byte *motion        = (byte *)_motionFrame.ImageData.ToPointer( );
                int   bytesPerPixel = Tools.BytesPerPixel(videoFrame.PixelFormat);

                // 1 - get difference between frames (averaged over every channel)
                // 2 - threshold the difference
                for (int i = 0; i < _height; i++)
                {
                    // per-row pointer copies so each image's own stride can be applied below
                    var currFrameLocal = currFrame;
                    var backFrameLocal = backFrame;
                    var motionLocal    = motion;
                    for (int j = 0; j < _width; j++)
                    {
                        var diff = 0;
                        for (int nbBytes = 0; nbBytes < bytesPerPixel; nbBytes++)
                        {
                            // accumulate absolute per-channel difference
                            diff += Math.Abs(*currFrameLocal - *backFrameLocal);
                            currFrameLocal++;
                            backFrameLocal++;
                        }
                        diff /= bytesPerPixel;
                        // threshold: 255 = motion, 0 = still
                        *motionLocal = (diff >= _differenceThreshold) ? (byte)255 : (byte)0;
                        motionLocal++;
                    }
                    // advance to the next row using each image's own stride
                    currFrame += videoFrame.Stride;
                    backFrame += _backgroundFrame.Stride;
                    motion    += _motionFrame.Stride;
                }

                if (_suppressNoise)
                {
                    // suppress noise: erode the motion mask through a temporary copy
                    AForge.SystemTools.CopyUnmanagedMemory(_tempFrame.ImageData, _motionFrame.ImageData, _motionSize);
                    _erosionFilter.Apply(_tempFrame, _motionFrame);

                    if (_keepObjectEdges)
                    {
                        // dilate back to restore object edges removed by erosion
                        AForge.SystemTools.CopyUnmanagedMemory(_tempFrame.ImageData, _motionFrame.ImageData, _motionSize);
                        _dilatationFilter.Apply(_tempFrame, _motionFrame);
                    }
                }

                // calculate amount of motion pixels (mask values are 0 or 255,
                // so the low bit distinguishes them)
                _pixelsChanged = 0;
                motion         = (byte *)_motionFrame.ImageData.ToPointer( );

                for (int i = 0; i < _motionSize; i++, motion++)
                {
                    _pixelsChanged += (*motion & 1);
                }
            }
        }
        // ===== Example #2 =====
        /// <summary>
        /// Process new video frame.
        /// </summary>
        ///
        /// <param name="videoFrame">Video frame to process (detect motion in).</param>
        ///
        /// <remarks><para>Processes new frame from video source and detects motion in it.
        /// The frame is converted to grayscale, the background model is adapted towards it
        /// (either every N frames or on an elapsed-milliseconds budget), and pixels whose
        /// difference from the background exceeds the threshold are marked as motion.</para>
        ///
        /// <para>Check <see cref="MotionLevel"/> property to get information about amount of motion
        /// (changes) in the processed frame.</para>
        /// </remarks>
        ///
        public unsafe void ProcessFrame(UnmanagedImage videoFrame)
        {
            lock ( _sync )
            {
                // first frame ever seen: initialize the background model and quit
                if (_backgroundFrame == null)
                {
                    _lastTimeMeasurment = DateTime.Now;

                    // save image dimension
                    _width  = videoFrame.Width;
                    _height = videoFrame.Height;

                    // allocate memory for background and motion frames (both 8bpp grayscale)
                    _backgroundFrame = UnmanagedImage.Create(_width, _height, PixelFormat.Format8bppIndexed);
                    _motionFrame     = UnmanagedImage.Create(_width, _height, PixelFormat.Format8bppIndexed);

                    _frameSize = _motionFrame.Stride * _height;

                    // temporary buffer used by the erosion/dilatation filters
                    if (_suppressNoise)
                    {
                        _tempFrame = UnmanagedImage.Create(_width, _height, PixelFormat.Format8bppIndexed);
                    }

                    // convert source frame to grayscale
                    Tools.ConvertToGrayscale(videoFrame, _backgroundFrame);

                    return;
                }

                // ignore frames whose dimensions differ from the background
                if ((videoFrame.Width != _width) || (videoFrame.Height != _height))
                {
                    return;
                }

                // convert current image to grayscale (into the motion-frame buffer)
                Tools.ConvertToGrayscale(videoFrame, _motionFrame);

                // pointers to background and current frames
                byte *backFrame;
                byte *currFrame;
                int   diff;

                // update background frame
                if (_millisecondsPerBackgroundUpdate == 0)
                {
                    // update background frame using frame counter as a base:
                    // every _framesPerBackgroundUpdate frames, move each background
                    // pixel one step towards the current frame
                    if (++_framesCounter == _framesPerBackgroundUpdate)
                    {
                        _framesCounter = 0;

                        backFrame = (byte *)_backgroundFrame.ImageData.ToPointer( );
                        currFrame = (byte *)_motionFrame.ImageData.ToPointer( );

                        for (int i = 0; i < _frameSize; i++, backFrame++, currFrame++)
                        {
                            diff = *currFrame - *backFrame;
                            if (diff > 0)
                            {
                                (*backFrame)++;
                            }
                            else if (diff < 0)
                            {
                                (*backFrame)--;
                            }
                        }
                    }
                }
                else
                {
                    // update background frame using timer as a base

                    // get current time and calculate difference since the last processed frame
                    DateTime currentTime = DateTime.Now;
                    TimeSpan timeDff     = currentTime - _lastTimeMeasurment;
                    // save current time as the last measurement
                    _lastTimeMeasurment = currentTime;

                    int millisonds = (int)timeDff.TotalMilliseconds + _millisecondsLeftUnprocessed;

                    // save remainder so it could be taken into account in the future
                    _millisecondsLeftUnprocessed = millisonds % _millisecondsPerBackgroundUpdate;
                    // get amount for background update (one step per elapsed interval)
                    int updateAmount = (millisonds / _millisecondsPerBackgroundUpdate);

                    backFrame = (byte *)_backgroundFrame.ImageData.ToPointer( );
                    currFrame = (byte *)_motionFrame.ImageData.ToPointer( );

                    // move each background pixel towards the current frame, clamping
                    // the step to the elapsed-time budget
                    for (int i = 0; i < _frameSize; i++, backFrame++, currFrame++)
                    {
                        diff = *currFrame - *backFrame;
                        if (diff > 0)
                        {
                            (*backFrame) += (byte)((diff < updateAmount) ? diff :  updateAmount);
                        }
                        else if (diff < 0)
                        {
                            (*backFrame) += (byte)((-diff < updateAmount) ? diff : -updateAmount);
                        }
                    }
                }

                backFrame = (byte *)_backgroundFrame.ImageData.ToPointer( );
                currFrame = (byte *)_motionFrame.ImageData.ToPointer( );

                // 1 - get difference between frames
                // 2 - threshold the difference (motion frame becomes a 0/255 mask)
                for (int i = 0; i < _frameSize; i++, backFrame++, currFrame++)
                {
                    // difference
                    diff = *currFrame - *backFrame;
                    // threshold
                    *currFrame = ((diff >= _differenceThreshold) || (diff <= _differenceThresholdNeg)) ? (byte)255 : (byte)0;
                }

                if (_suppressNoise)
                {
                    // suppress noise: erode the motion mask through a temporary copy
                    AForge.SystemTools.CopyUnmanagedMemory(_tempFrame.ImageData, _motionFrame.ImageData, _frameSize);
                    _erosionFilter.Apply(_tempFrame, _motionFrame);

                    if (_keepObjectEdges)
                    {
                        // dilate back to restore object edges removed by erosion
                        AForge.SystemTools.CopyUnmanagedMemory(_tempFrame.ImageData, _motionFrame.ImageData, _frameSize);
                        _dilatationFilter.Apply(_tempFrame, _motionFrame);
                    }
                }

                // calculate amount of motion pixels (mask values are 0 or 255,
                // so the low bit distinguishes them)
                _pixelsChanged = 0;
                byte *motion = (byte *)_motionFrame.ImageData.ToPointer( );

                for (int i = 0; i < _frameSize; i++, motion++)
                {
                    _pixelsChanged += (*motion & 1);
                }
            }
        }
        /// <summary>
        /// Process new video frame.
        /// </summary>
        ///
        /// <param name="videoFrame">Video frame to process (detect motion in).</param>
        ///
        /// <remarks><para>Processes new frame from video source and detects motion in it.
        /// The first frame becomes the grayscale background; each later frame is converted
        /// to grayscale and thresholded against the background into a 0/255 motion mask.</para>
        ///
        /// <para>Check <see cref="MotionLevel"/> property to get information about amount of motion
        /// (changes) in the processed frame.</para>
        /// </remarks>
        ///
        public unsafe void ProcessFrame(UnmanagedImage videoFrame)
        {
            lock ( sync )
            {
                // first frame ever seen: initialize the grayscale background model and quit
                if (backgroundFrame == null)
                {
                    width  = videoFrame.Width;
                    height = videoFrame.Height;

                    backgroundFrame = UnmanagedImage.Create(width, height, PixelFormat.Format8bppIndexed);
                    frameSize       = backgroundFrame.Stride * height;

                    Tools.ConvertToGrayscale(videoFrame, backgroundFrame);

                    return;
                }

                // reject frames whose dimensions differ from the background
                if ((videoFrame.Width != width) || (videoFrame.Height != height))
                {
                    return;
                }

                // lazily create the motion map (plus scratch buffer for noise suppression)
                if (motionFrame == null)
                {
                    motionFrame = UnmanagedImage.Create(width, height, PixelFormat.Format8bppIndexed);

                    if (suppressNoise)
                    {
                        tempFrame = UnmanagedImage.Create(width, height, PixelFormat.Format8bppIndexed);
                    }
                }

                // grayscale version of the current frame goes straight into the motion buffer
                Tools.ConvertToGrayscale(videoFrame, motionFrame);

                byte *bg  = (byte *)backgroundFrame.ImageData.ToPointer( );
                byte *cur = (byte *)motionFrame.ImageData.ToPointer( );

                // threshold the background/current difference in place:
                // 255 marks a motion pixel, 0 a still one
                for (int offset = 0; offset < frameSize; offset++, bg++, cur++)
                {
                    int  delta = (int)*cur - (int)*bg;
                    bool moved = (delta >= differenceThreshold) || (delta <= differenceThresholdNeg);
                    *cur = moved ? (byte)255 : (byte)0;
                }

                if (suppressNoise)
                {
                    // erode the mask (through a temp copy) to drop isolated noise pixels
                    AForge.SystemTools.CopyUnmanagedMemory(tempFrame.ImageData, motionFrame.ImageData, frameSize);
                    erosionFilter.Apply(tempFrame, motionFrame);

                    if (keepObjectEdges)
                    {
                        // dilate back to recover object edges lost to erosion
                        AForge.SystemTools.CopyUnmanagedMemory(tempFrame.ImageData, motionFrame.ImageData, frameSize);
                        dilatationFilter.Apply(tempFrame, motionFrame);
                    }
                }

                // count motion pixels: values are 0 or 255, so the low bit marks motion
                pixelsChanged = 0;
                byte *mask = (byte *)motionFrame.ImageData.ToPointer( );

                for (int offset = 0; offset < frameSize; offset++, mask++)
                {
                    pixelsChanged += (*mask & 1);
                }
            }
        }
        // ===== Example #4 =====
        /// <summary>
        /// Process new video frame.
        /// </summary>
        ///
        /// <param name="videoFrame">Video frame to process (detect motion in).</param>
        ///
        /// <remarks><para>Processes new frame from video source and detects motion in it.
        /// The background model keeps the source pixel format and is adapted towards each
        /// new frame (per frame count or per elapsed milliseconds); motion is the per-pixel
        /// channel-averaged absolute difference thresholded into a 0/255 mask.</para>
        ///
        /// <para>Check <see cref="MotionLevel"/> property to get information about amount of motion
        /// (changes) in the processed frame.</para>
        /// </remarks>
        ///
        public unsafe void ProcessFrame(UnmanagedImage videoFrame)
        {
            lock ( sync )
            {
                // first frame ever seen: initialize the background model and quit
                if (backgroundFrame == null)
                {
                    lastTimeMeasurment = DateTime.Now;

                    // save image dimension
                    width  = videoFrame.Width;
                    height = videoFrame.Height;

                    // allocate memory for previous and current frames;
                    // the background keeps the source pixel format, the motion map is 8bpp
                    backgroundFrame = UnmanagedImage.Create(width, height, videoFrame.PixelFormat);
                    motionFrame     = UnmanagedImage.Create(width, height, PixelFormat.Format8bppIndexed);

                    // NOTE(review): frameSize uses the source frame's stride — this assumes
                    // backgroundFrame (same size/format) has an identical stride; verify
                    frameSize  = videoFrame.Stride * height;
                    motionSize = motionFrame.Stride * motionFrame.Height;

                    // temporary buffer used by the erosion/dilatation filters
                    if (suppressNoise)
                    {
                        tempFrame = UnmanagedImage.Create(width, height, PixelFormat.Format8bppIndexed);
                    }

                    // set the background frame from the very first video frame
                    videoFrame.Copy(backgroundFrame);

                    return;
                }

                // ignore frames whose dimensions differ from the background
                if ((videoFrame.Width != width) || (videoFrame.Height != height))
                {
                    return;
                }


                // pointers to background and current frames
                byte *backFrame;
                byte *currFrame;
                int   diff;

                // update background frame
                if (millisecondsPerBackgroundUpdate == 0)
                {
                    // update background frame using frame counter as a base:
                    // every framesPerBackgroundUpdate frames, move each background
                    // byte one step towards the current frame
                    if (++framesCounter == framesPerBackgroundUpdate)
                    {
                        framesCounter = 0;

                        backFrame = (byte *)backgroundFrame.ImageData.ToPointer( );
                        currFrame = (byte *)videoFrame.ImageData.ToPointer( );

                        for (int i = 0; i < frameSize; i++, backFrame++, currFrame++)
                        {
                            diff = *currFrame - *backFrame;
                            if (diff > 0)
                            {
                                (*backFrame)++;
                            }
                            else if (diff < 0)
                            {
                                (*backFrame)--;
                            }
                        }
                    }
                }
                else
                {
                    // update background frame using timer as a base

                    // get current time and calculate difference since the last processed frame
                    DateTime currentTime = DateTime.Now;
                    TimeSpan timeDff     = currentTime - lastTimeMeasurment;
                    // save current time as the last measurement
                    lastTimeMeasurment = currentTime;

                    int millisonds = (int)timeDff.TotalMilliseconds + millisecondsLeftUnprocessed;

                    // save remainder so it could be taken into account in the future
                    millisecondsLeftUnprocessed = millisonds % millisecondsPerBackgroundUpdate;
                    // get amount for background update (one step per elapsed interval)
                    int updateAmount = (int)(millisonds / millisecondsPerBackgroundUpdate);

                    backFrame = (byte *)backgroundFrame.ImageData.ToPointer( );
                    currFrame = (byte *)videoFrame.ImageData.ToPointer( );

                    // move each background byte towards the current frame, clamping
                    // the step to the elapsed-time budget
                    for (int i = 0; i < frameSize; i++, backFrame++, currFrame++)
                    {
                        diff = *currFrame - *backFrame;
                        if (diff > 0)
                        {
                            (*backFrame) += (byte)((diff < updateAmount) ? diff :  updateAmount);
                        }
                        else if (diff < 0)
                        {
                            (*backFrame) += (byte)((-diff < updateAmount) ? diff : -updateAmount);
                        }
                    }
                }

                backFrame = (byte *)backgroundFrame.ImageData.ToPointer( );
                currFrame = (byte *)videoFrame.ImageData.ToPointer( );
                byte *motion = (byte *)motionFrame.ImageData.ToPointer( );
                byte *currFrameLocal;
                byte *backFrameLocal;
                byte *motionLocal;
                int   bytesPerPixel = Tools.BytesPerPixel(videoFrame.PixelFormat);

                // 1 - get difference between frames (averaged over every channel)
                // 2 - threshold the difference into a 0/255 motion mask
                for (int i = 0; i < height; i++)
                {
                    // per-row pointer copies so each image's own stride can be applied below
                    currFrameLocal = currFrame;
                    backFrameLocal = backFrame;
                    motionLocal    = motion;
                    for (int j = 0; j < width; j++)
                    {
                        diff = 0;
                        for (int nbBytes = 0; nbBytes < bytesPerPixel; nbBytes++)
                        {
                            // accumulate absolute per-channel difference
                            diff += Math.Abs((int)*currFrameLocal - (int)*backFrameLocal);
                            currFrameLocal++;
                            backFrameLocal++;
                        }
                        diff /= bytesPerPixel;
                        // threshold: 255 = motion, 0 = still
                        *motionLocal = (diff >= differenceThreshold) ? (byte)255 : (byte)0;
                        motionLocal++;
                    }
                    // advance to the next row using each image's own stride
                    currFrame += videoFrame.Stride;
                    backFrame += backgroundFrame.Stride;
                    motion    += motionFrame.Stride;
                }

                if (suppressNoise)
                {
                    // suppress noise: erode the motion mask through a temporary copy
                    AForge.SystemTools.CopyUnmanagedMemory(tempFrame.ImageData, motionFrame.ImageData, motionSize);
                    erosionFilter.Apply(tempFrame, motionFrame);

                    if (keepObjectEdges)
                    {
                        // dilate back to restore object edges removed by erosion
                        AForge.SystemTools.CopyUnmanagedMemory(tempFrame.ImageData, motionFrame.ImageData, motionSize);
                        dilatationFilter.Apply(tempFrame, motionFrame);
                    }
                }

                // calculate amount of motion pixels (mask values are 0 or 255,
                // so the low bit distinguishes them)
                pixelsChanged = 0;
                motion        = (byte *)motionFrame.ImageData.ToPointer( );

                for (int i = 0; i < motionSize; i++, motion++)
                {
                    pixelsChanged += (*motion & 1);
                }
            }
        }
        /// <summary>
        /// Process new video frame.
        /// </summary>
        ///
        /// <param name="videoFrame">Video frame to process (detect motion in).</param>
        ///
        /// <remarks><para>Processes new frame from video source and detects motion in it.
        /// The frame is converted to grayscale, the background model is adapted towards it
        /// (either every N frames or on an elapsed-milliseconds budget), and pixels whose
        /// difference from the background exceeds the threshold are marked as motion.</para>
        /// </remarks>
        public unsafe void ProcessFrame(UnmanagedImage videoFrame)
        {
            lock (sync)
            {
                // first frame ever seen: initialize the background model and quit
                if (backgroundFrame == null)
                {
                    lastTimeMeasurment = DateTime.Now;

                    // remember image dimensions
                    width  = videoFrame.Width;
                    height = videoFrame.Height;

                    // allocate background and motion frames (both 8bpp grayscale)
                    backgroundFrame = UnmanagedImage.Create(width, height, PixelFormat.Format8bppIndexed);
                    motionFrame     = UnmanagedImage.Create(width, height, PixelFormat.Format8bppIndexed);

                    frameSize = motionFrame.Stride * height;

                    // temporary buffer used by the erosion/dilatation filters
                    if (suppressNoise)
                    {
                        tempFrame = UnmanagedImage.Create(width, height, PixelFormat.Format8bppIndexed);
                    }

                    // convert source frame to grayscale as the initial background
                    Tools.ConvertToGrayscale(videoFrame, backgroundFrame);

                    return;
                }

                // ignore frames whose dimensions differ from the background
                if ((videoFrame.Width != width) || (videoFrame.Height != height))
                {
                    return;
                }

                // convert current image to grayscale (into the motion-frame buffer)
                Tools.ConvertToGrayscale(videoFrame, motionFrame);

                // pointers to background and current frames
                byte *backFrame;
                byte *currFrame;
                int   diff;

                // update background frame
                if (millisecondsPerBackgroundUpdate == 0)
                {
                    // frame-counter based adaptation: every framesPerBackgroundUpdate
                    // frames, move each background pixel one step towards the current frame
                    if (++framesCounter == framesPerBackgroundUpdate)
                    {
                        framesCounter = 0;

                        backFrame = (byte *)backgroundFrame.ImageData.ToPointer();
                        currFrame = (byte *)motionFrame.ImageData.ToPointer();

                        for (int i = 0; i < frameSize; i++, backFrame++, currFrame++)
                        {
                            diff = *currFrame - *backFrame;
                            if (diff > 0)
                            {
                                (*backFrame)++;
                            }
                            else if (diff < 0)
                            {
                                (*backFrame)--;
                            }
                        }
                    }
                }
                else
                {
                    // timer-based adaptation: step size is proportional to elapsed time

                    DateTime currentTime = DateTime.Now;
                    TimeSpan timeDff     = currentTime - lastTimeMeasurment;

                    // remember current time as the last measurement
                    lastTimeMeasurment = currentTime;

                    int millisonds = (int)timeDff.TotalMilliseconds + millisecondsLeftUnprocessed;

                    // carry the remainder over to the next frame
                    millisecondsLeftUnprocessed = millisonds % millisecondsPerBackgroundUpdate;

                    // number of adaptation steps allowed this frame
                    int updateAmount = (int)(millisonds / millisecondsPerBackgroundUpdate);

                    backFrame = (byte *)backgroundFrame.ImageData.ToPointer();
                    currFrame = (byte *)motionFrame.ImageData.ToPointer();

                    // move each background pixel towards the current frame, clamping
                    // the step to the elapsed-time budget
                    for (int i = 0; i < frameSize; i++, backFrame++, currFrame++)
                    {
                        diff = *currFrame - *backFrame;
                        if (diff > 0)
                        {
                            (*backFrame) += (byte)((diff < updateAmount) ? diff : updateAmount);
                        }
                        else if (diff < 0)
                        {
                            (*backFrame) += (byte)((-diff < updateAmount) ? diff : -updateAmount);
                        }
                    }
                }

                backFrame = (byte *)backgroundFrame.ImageData.ToPointer();
                currFrame = (byte *)motionFrame.ImageData.ToPointer();

                // threshold the background/current difference in place
                // (motion frame becomes a 0/255 mask)
                for (int i = 0; i < frameSize; i++, backFrame++, currFrame++)
                {
                    diff = (int)*currFrame - (int)*backFrame;

                    *currFrame = ((diff >= differenceThreshold) || (diff <= differenceThresholdNeg)) ? (byte)255 : (byte)0;
                }

                if (suppressNoise)
                {
                    // suppress noise: erode the motion mask through a temporary copy
                    SystemTools.CopyUnmanagedMemory(tempFrame.ImageData, motionFrame.ImageData, frameSize);
                    erosionFilter.Apply(tempFrame, motionFrame);

                    if (keepObjectEdges)
                    {
                        // dilate back to restore object edges removed by erosion
                        SystemTools.CopyUnmanagedMemory(tempFrame.ImageData, motionFrame.ImageData, frameSize);
                        dilatationFilter.Apply(tempFrame, motionFrame);
                    }
                }

                // count motion pixels (mask values are 0 or 255, so the low bit marks motion)
                pixelsChanged = 0;
                byte *motion = (byte *)motionFrame.ImageData.ToPointer();

                for (int i = 0; i < frameSize; i++, motion++)
                {
                    pixelsChanged += (*motion & 1);
                }
            }
        }
        /// <summary>
        /// Process new video frame.
        /// </summary>
        ///
        /// <param name="videoFrame">Video frame to process (detect motion in).</param>
        ///
        /// <remarks><para>Processes new frame from video source and detects motion in it.
        /// The frame is converted to grayscale, the background model is adapted towards it
        /// (per frame count or per elapsed milliseconds), and pixels differing from the
        /// background beyond the threshold are marked as motion.</para>
        ///
        /// <para>Check <see cref="MotionLevel"/> property to get information about amount of motion
        /// (changes) in the processed frame.</para>
        /// </remarks>
        ///
        public unsafe void ProcessFrame(UnmanagedImage videoFrame)
        {
            lock (sync)
            {
                // first frame ever seen: initialize the background model and quit
                if (backgroundFrame == null)
                {
                    lastTimeMeasurment = DateTime.Now;

                    // save image dimension
                    width  = videoFrame.Width;
                    height = videoFrame.Height;

                    // allocate memory for background and motion frames (both 8bpp grayscale)
                    backgroundFrame = UnmanagedImage.Create(width, height, PixelFormat.Format8bppIndexed);
                    motionFrame     = UnmanagedImage.Create(width, height, PixelFormat.Format8bppIndexed);

                    frameSize = motionFrame.Stride * height;

                    // temporary buffer used by the erosion/dilatation filters
                    if (suppressNoise)
                    {
                        tempFrame = UnmanagedImage.Create(width, height, PixelFormat.Format8bppIndexed);
                    }

                    // convert source frame to grayscale
                    Tools.ConvertToGrayscale(videoFrame, backgroundFrame);

                    return;
                }

                // ignore frames whose dimensions differ from the background
                if ((videoFrame.Width != width) || (videoFrame.Height != height))
                {
                    return;
                }

                // convert current image to grayscale (into the motion-frame buffer)
                Tools.ConvertToGrayscale(videoFrame, motionFrame);

                // pointers to background and current frames
                byte *backFrame;
                byte *currFrame;
                int   diff;

                // update background frame
                if (millisecondsPerBackgroundUpdate == 0)
                {
                    // update background frame using frame counter as a base:
                    // every framesPerBackgroundUpdate frames, move each background
                    // pixel one step towards the current frame
                    if (++framesCounter == framesPerBackgroundUpdate)
                    {
                        framesCounter = 0;

                        backFrame = (byte *)backgroundFrame.ImageData.ToPointer();
                        currFrame = (byte *)motionFrame.ImageData.ToPointer();

                        for (var i = 0; i < frameSize; i++, backFrame++, currFrame++)
                        {
                            diff = *currFrame - *backFrame;
                            if (diff > 0)
                            {
                                (*backFrame)++;
                            }
                            else if (diff < 0)
                            {
                                (*backFrame)--;
                            }
                        }
                    }
                }
                else
                {
                    // update background frame using timer as a base

                    // get current time and calculate difference since the last processed frame
                    var currentTime = DateTime.Now;
                    var timeDff     = currentTime - lastTimeMeasurment;
                    // save current time as the last measurement
                    lastTimeMeasurment = currentTime;

                    var millisonds = (int)timeDff.TotalMilliseconds + millisecondsLeftUnprocessed;

                    // save remainder so it could be taken into account in the future
                    millisecondsLeftUnprocessed = millisonds % millisecondsPerBackgroundUpdate;
                    // get amount for background update (one step per elapsed interval)
                    var updateAmount = millisonds / millisecondsPerBackgroundUpdate;

                    backFrame = (byte *)backgroundFrame.ImageData.ToPointer();
                    currFrame = (byte *)motionFrame.ImageData.ToPointer();

                    // move each background pixel towards the current frame, clamping
                    // the step to the elapsed-time budget
                    for (var i = 0; i < frameSize; i++, backFrame++, currFrame++)
                    {
                        diff = *currFrame - *backFrame;
                        if (diff > 0)
                        {
                            (*backFrame) += (byte)((diff < updateAmount) ? diff : updateAmount);
                        }
                        else if (diff < 0)
                        {
                            (*backFrame) += (byte)((-diff < updateAmount) ? diff : -updateAmount);
                        }
                    }
                }

                backFrame = (byte *)backgroundFrame.ImageData.ToPointer();
                currFrame = (byte *)motionFrame.ImageData.ToPointer();

                // 1 - get difference between frames
                // 2 - threshold the difference (motion frame becomes a 0/255 mask)
                for (var i = 0; i < frameSize; i++, backFrame++, currFrame++)
                {
                    // difference
                    diff = *currFrame - *backFrame;
                    // threshold
                    // FIX: explicit (byte) casts — a bare "? 255 : 0" conditional has type
                    // int, which does not convert implicitly to byte (compile error CS0266);
                    // this also matches the sibling ProcessFrame implementations
                    *currFrame = ((diff >= differenceThreshold) || (diff <= differenceThresholdNeg)) ? (byte)255 : (byte)0;
                }

                if (suppressNoise)
                {
                    // suppress noise: erode the motion mask through a temporary copy
                    AForge.SystemTools.CopyUnmanagedMemory(tempFrame.ImageData, motionFrame.ImageData, frameSize);
                    erosionFilter.Apply(tempFrame, motionFrame);

                    if (keepObjectEdges)
                    {
                        // dilate back to restore object edges removed by erosion
                        AForge.SystemTools.CopyUnmanagedMemory(tempFrame.ImageData, motionFrame.ImageData, frameSize);
                        dilatationFilter.Apply(tempFrame, motionFrame);
                    }
                }

                // calculate amount of motion pixels (mask values are 0 or 255,
                // so the low bit distinguishes them)
                pixelsChanged = 0;
                var motion = (byte *)motionFrame.ImageData.ToPointer();

                for (var i = 0; i < frameSize; i++, motion++)
                {
                    pixelsChanged += (*motion & 1);
                }
            }
        }
        /// <summary>
        /// Process new video frame.
        /// </summary>
        ///
        /// <param name="videoFrame">Video frame to process (detect motion in).</param>
        ///
        /// <remarks><para>Processes new frame from video source and detects motion in it.</para>
        ///
        /// <para>Check <see cref="MotionLevel"/> property to get information about amount of motion
        /// (changes) in the processed frame.</para>
        /// </remarks>
        ///
        public unsafe void ProcessFrame(UnmanagedImage videoFrame)
        {
            lock ( _sync )
            {
                // very first frame: capture it as the background model and return
                if (_backgroundFrame == null)
                {
                    // remember the dimensions every later frame must match
                    _width  = videoFrame.Width;
                    _height = videoFrame.Height;

                    // allocate grayscale background frame
                    _backgroundFrame = UnmanagedImage.Create(_width, _height, PixelFormat.Format8bppIndexed);
                    _frameSize       = _backgroundFrame.Stride * _height;

                    // store grayscale version of the source frame as the background
                    Tools.ConvertToGrayscale(videoFrame, _backgroundFrame);

                    return;
                }

                // silently ignore frames whose size differs from the background frame
                if ((videoFrame.Width != _width) || (videoFrame.Height != _height))
                {
                    return;
                }

                // lazily create the motion frame (and noise-suppression scratch buffer)
                if (_motionFrame == null)
                {
                    _motionFrame = UnmanagedImage.Create(_width, _height, PixelFormat.Format8bppIndexed);

                    if (_suppressNoise)
                    {
                        _tempFrame = UnmanagedImage.Create(_width, _height, PixelFormat.Format8bppIndexed);
                    }
                }

                // grayscale copy of the current frame goes into the motion frame
                Tools.ConvertToGrayscale(videoFrame, _motionFrame);

                // walk background and current frames pixel by pixel
                var pBackground = (byte *)_backgroundFrame.ImageData.ToPointer( );
                var pCurrent    = (byte *)_motionFrame.ImageData.ToPointer( );

                // threshold the per-pixel difference between current and background:
                // pixels outside [_differenceThresholdNeg, _differenceThreshold] become 255, others 0
                for (var offset = 0; offset < _frameSize; offset++)
                {
                    var delta = *pCurrent - *pBackground;

                    if ((delta >= _differenceThreshold) || (delta <= _differenceThresholdNeg))
                    {
                        *pCurrent = 255;
                    }
                    else
                    {
                        *pCurrent = 0;
                    }

                    pBackground++;
                    pCurrent++;
                }

                if (_suppressNoise)
                {
                    // erode the motion frame to suppress isolated noise pixels
                    AForge.SystemTools.CopyUnmanagedMemory(_tempFrame.ImageData, _motionFrame.ImageData, _frameSize);
                    _erosionFilter.Apply(_tempFrame, _motionFrame);

                    if (_keepObjectEdges)
                    {
                        // dilate back so object edges lost to erosion are restored
                        AForge.SystemTools.CopyUnmanagedMemory(_tempFrame.ImageData, _motionFrame.ImageData, _frameSize);
                        _dilatationFilter.Apply(_tempFrame, _motionFrame);
                    }
                }

                // count motion pixels: 255 has the lowest bit set, 0 does not
                _pixelsChanged = 0;
                var pMotion = (byte *)_motionFrame.ImageData.ToPointer( );

                for (var offset = 0; offset < _frameSize; offset++, pMotion++)
                {
                    _pixelsChanged += (*pMotion & 1);
                }
            }
        }