/// <summary>
///   Process the filter on the specified image.
/// </summary>
///
/// <param name="sourceData">Source image data.</param>
/// <param name="destinationData">Destination image data.</param>
///
protected override void ProcessFilter(UnmanagedImage sourceData, UnmanagedImage destinationData)
{
    // promote 8bpp grayscale input to RGB so colored markers can be drawn
    if (sourceData.PixelFormat == PixelFormat.Format8bppIndexed)
    {
        sourceData = toRGB.Apply(sourceData);
    }

    // copy image contents into the destination before drawing on it
    sourceData.Copy(destinationData);

    Bitmap managedImage = destinationData.ToManagedImage(makeCopy: false);

    using (Graphics g = Graphics.FromImage(managedImage))
    using (Pen positive = new Pen(Color.Red))
    using (Pen negative = new Pen(Color.Blue))
    using (Pen line = new Pen(Color.FromArgb(0, 255, 0)))
    {
        // mark every detected feature point
        foreach (SpeededUpRobustFeaturePoint p in points)
        {
            // circle diameter/radius derived from the feature's scale (factor 2.5 per side)
            int diameter = 2 * (int)(2.5f * p.Scale);
            int radius = (int)(diameter / 2f);

            Point center = new Point((int)p.X, (int)p.Y);

            // orientation vector endpoint, relative to the center
            Point tip = new Point(
                (int)(radius * System.Math.Cos(p.Orientation)),
                (int)(radius * System.Math.Sin(p.Orientation)));

            // NOTE(review): the pen named "negative" (blue) is used for positive
            // Laplacian values — confirm the naming/color mapping is intended
            Pen circlePen = (p.Laplacian > 0 ? negative : positive);

            g.DrawEllipse(circlePen, center.X - radius, center.Y - radius, diameter, diameter);
            g.DrawLine(line, center, new Point(center.X + tip.X, center.Y + tip.Y));
        }
    }
}
/// <summary>
///   Converts the source image to grayscale in the destination image,
///   or simply copies it when the source is already 8bpp indexed.
/// </summary>
///
/// <param name="source">Image to convert.</param>
/// <param name="destination">Image that receives the grayscale result.</param>
///
public static void ConvertToGrayscale(UnmanagedImage source, UnmanagedImage destination)
{
    if (source.PixelFormat == PixelFormat.Format8bppIndexed)
    {
        // already grayscale — a plain copy is enough
        source.Copy(destination);
    }
    else
    {
        // convert using the BT.709 luminance coefficients
        Grayscale.CommonAlgorithms.BT709.Apply(source, destination);
    }
}
/// <summary>
///   Copies the <paramref name="source"/> image into a sub-region of the
///   <paramref name="destination"/> image, starting at the given offset.
/// </summary>
///
/// <param name="source">The image to copy from.</param>
/// <param name="destination">The image to copy into; must be large enough to
///   hold the source image at the given offset.</param>
/// <param name="offset">Top-left position in the destination where the source
///   will be placed. Must be non-negative.</param>
///
/// <exception cref="ArgumentException">Thrown when the offset is negative or
///   the source image does not fit inside the destination at that offset.</exception>
///
public unsafe static void CopyWithOffset(this UnmanagedImage source, UnmanagedImage destination, IntPoint offset)
{
    // validate that the source rectangle fits entirely inside the destination
    if (source.Width + offset.X > destination.Width ||
        source.Height + offset.Y > destination.Height ||
        offset.X < 0 || offset.Y < 0)
    {
        // FIX: was "throw new Exception(...)" (CA2201 — never throw the base type);
        // ArgumentException derives from Exception, so existing catch blocks still work
        throw new ArgumentException(
            "The source image does not fit inside the destination image at the specified offset.",
            nameof(offset));
    }

    // advance the destination pointer to the first byte of the target region
    byte *ptr = (byte *)destination.ImageData.ToPointer();
    ptr += offset.Y * destination.Stride;
    ptr += offset.X * Bitmap.GetPixelFormatSize(destination.PixelFormat) / 8;

    // wrap the target region as an UnmanagedImage that shares the destination's
    // memory and stride, and let the standard Copy do the row-by-row transfer
    using (UnmanagedImage destinationAreaWrapper = new UnmanagedImage(
        new IntPtr(ptr), source.Width, source.Height, destination.Stride, destination.PixelFormat))
    {
        source.Copy(destinationAreaWrapper);
    }
}
/// <summary>
/// Set background frame.
/// </summary>
///
/// <param name="backgroundFrame">Background frame to set.</param>
///
/// <remarks><para>The method sets background frame, which will be used to calculate
/// difference with.</para></remarks>
///
public void SetBackgroundFrame(UnmanagedImage backgroundFrame)
{
    // reset motion detection algorithm before installing a new background
    Reset(true);

    lock ( _sync )
    {
        // remember the dimensions of the supplied frame
        _width = backgroundFrame.Width;
        _height = backgroundFrame.Height;

        // allocate a private buffer and copy the supplied frame into it
        _backgroundFrame = UnmanagedImage.Create(_width, _height, backgroundFrame.PixelFormat);
        backgroundFrame.Copy(_backgroundFrame);

        // flag that the background was provided by the caller, not learned
        _manuallySetBackgroundFrame = true;
    }
}
/// <summary>
/// Set background frame.
/// </summary>
///
/// <param name="backgroundFrame">Background frame to set.</param>
///
/// <remarks><para>The method sets background frame, which will be used to calculate
/// difference with.</para></remarks>
///
public void SetBackgroundFrame(UnmanagedImage backgroundFrame)
{
    // reset motion detection algorithm before installing a new background
    Reset(true);

    lock ( sync )
    {
        // remember the dimensions of the supplied frame
        width = backgroundFrame.Width;
        height = backgroundFrame.Height;

        // allocate a private buffer and cache its size in bytes
        this.backgroundFrame = UnmanagedImage.Create(width, height, backgroundFrame.PixelFormat);
        frameSize = this.backgroundFrame.Stride * height;

        // copy the supplied frame into the private buffer
        backgroundFrame.Copy(this.backgroundFrame);

        // flag that the background was provided by the caller, not learned
        manuallySetBackgroundFrame = true;
    }
}
/// <summary>
///   Corrects fisheye lens distortion in the given image, in place.
/// </summary>
///
/// <param name="img">Image to correct; its pixels are overwritten with the remapped values.</param>
/// <param name="aFocalLinPixels">Focal length, in pixels.</param>
/// <param name="limit">Correction limit.</param>
/// <param name="scale">Scale factor for the corrected image.</param>
/// <param name="offx">Horizontal offset of the correction center.</param>
/// <param name="offy">Vertical offset of the correction center.</param>
///
public void Correct(UnmanagedImage img, double aFocalLinPixels, int limit, double scale, int offx, int offy)
{
    // rebuild the cached pixel remapping table only when a parameter changed
    if (Math.Abs(_aFocalLinPixels - aFocalLinPixels) > Double.Epsilon ||
        limit != _mFeLimit ||
        Math.Abs(scale - _mScaleFeSize) > Double.Epsilon ||
        img.Width != _w || img.Height != _h ||
        _offsetx != offx || _offsety != offy)
    {
        Init(aFocalLinPixels, limit, scale, img.Width, img.Height, offx, offy);
    }

    // FIX: wrap the working copy in 'using' so it is disposed even when
    // GetPixel/SetPixel throws (the original leaked it on exception)
    using (var correctImage = UnmanagedImage.Create(img.Width, img.Height, img.PixelFormat))
    {
        // work on a copy so reads are not affected by writes into the same image
        img.Copy(correctImage);

        // remap every pixel through the precomputed lookup table _map
        int c = 0;
        for (int x = 0; x < _w; x++)
        {
            for (int y = 0; y < _h; y++)
            {
                img.SetPixel(x, y, correctImage.GetPixel(_map[c, 0], _map[c, 1]));
                c++;
            }
        }
    }
}
/// <summary>
/// Process new video frame.
/// </summary>
///
/// <param name="videoFrame">Video frame to process (detect motion in).</param>
///
/// <remarks><para>Processes new frame from video source and detects motion in it.</para>
///
/// <para>Check <see cref="MotionLevel"/> property to get information about amount of motion
/// (changes) in the processed frame.</para>
/// </remarks>
///
public unsafe void ProcessFrame(UnmanagedImage videoFrame)
{
    lock ( _sync )
    {
        // first frame ever seen: allocate buffers and remember it as "previous"
        if (_previousFrame == null)
        {
            // save image dimension
            _width = videoFrame.Width;
            _height = videoFrame.Height;

            // allocate memory for previous and current frames
            _previousFrame = UnmanagedImage.Create(_width, _height, videoFrame.PixelFormat);
            _motionFrame = UnmanagedImage.Create(_width, _height, PixelFormat.Format8bppIndexed);

            _motionSize = _motionFrame.Stride * _height;

            // temporary buffer used by the noise suppression (erosion) step
            if (_suppressNoise)
            {
                _tempFrame = UnmanagedImage.Create(_width, _height, PixelFormat.Format8bppIndexed);
            }

            // copy source frame into the previous-frame buffer
            videoFrame.Copy(_previousFrame);

            return;
        }

        // ignore frames whose dimensions do not match the first frame
        if ((videoFrame.Width != _width) || (videoFrame.Height != _height))
        {
            return;
        }

        // pointers to previous and current frames
        byte *prevFrame = (byte *)_previousFrame.ImageData.ToPointer( );
        byte *currFrame = (byte *)videoFrame.ImageData.ToPointer( );
        byte *motion = (byte *)_motionFrame.ImageData.ToPointer( );

        int bytesPerPixel = Tools.BytesPerPixel(videoFrame.PixelFormat);

        // difference value
        // 1 - get difference between frames
        // 2 - threshold the difference (accumulated over every channels)
        // 3 - copy current frame to previous frame
        for (int i = 0; i < _height; i++)
        {
            // per-row cursors so the outer pointers can advance by stride
            var currFrameLocal = currFrame;
            var prevFrameLocal = prevFrame;
            var motionLocal = motion;

            for (int j = 0; j < _width; j++)
            {
                var diff = 0;

                for (int nbBytes = 0; nbBytes < bytesPerPixel; nbBytes++)
                {
                    // accumulate absolute per-channel difference
                    diff += Math.Abs(*currFrameLocal - *prevFrameLocal);

                    // copy current frame to previous
                    *prevFrameLocal = *currFrameLocal;

                    currFrameLocal++;
                    prevFrameLocal++;
                }

                // average over channels before thresholding
                diff /= bytesPerPixel;

                // threshold: motion pixel set to 255 when average difference reaches threshold
                *motionLocal = (diff >= _differenceThreshold) ? (byte)255 : (byte)0;
                motionLocal++;
            }

            // advance each image by its own stride (strides may differ between images)
            currFrame += videoFrame.Stride;
            prevFrame += _previousFrame.Stride;
            motion += _motionFrame.Stride;
        }

        if (_suppressNoise)
        {
            // suppress noise and calculate motion amount
            Accord.SystemTools.CopyUnmanagedMemory(_tempFrame.ImageData, _motionFrame.ImageData, _motionSize);
            _erosionFilter.Apply(_tempFrame, _motionFrame);
        }

        // calculate amount of motion pixels
        // (each motion byte is 0 or 255, so the low bit counts set pixels)
        _pixelsChanged = 0;
        motion = (byte *)_motionFrame.ImageData.ToPointer( );

        for (int i = 0; i < _motionSize; i++, motion++)
        {
            _pixelsChanged += (*motion & 1);
        }
    }
}
/// <summary>
/// Process the filter on the specified image.
/// </summary>
///
/// <param name="source">Source image data.</param>
/// <param name="destination">Destination image data.</param>
/// <param name="rect">Image rectangle for processing by the filter.</param>
///
protected override unsafe void ProcessFilter(UnmanagedImage source, UnmanagedImage destination, Rectangle rect)
{
    source.Copy(destination);

    // processing start and stop X,Y positions (a 1-pixel border is left untouched)
    var startX = 1;
    var startY = 1;
    var stopX = rect.Width - 1;
    var stopY = rect.Height - 1;

    var dstStride = destination.Stride;
    // offset to jump from the end of one processed row to the start of the next
    var dstOffset = dstStride - rect.Width + 2;
    // deletion-marker buffer: one byte per interior pixel
    var delSize = (rect.Width - 2) * (rect.Height - 2);

    // do the job
    var dst0 = (byte *)destination.ImageData.ToPointer();

    IntPtr delPtr = IntPtr.Zero;

    // constrained region: guarantee the finally block runs to free the unmanaged buffer
    RuntimeHelpers.PrepareConstrainedRegions();
    try
    {
        delPtr = Marshal.AllocHGlobal(delSize);
        var del0 = (byte *)delPtr.ToPointer();

        // initialize the deletion map: 0xFF = "not marked for deletion"
        for (var i = 0; i < delSize; i++)
        {
            del0[i] = 0xFF;
        }

        bool endOfAlgorithm;

        // repeat thinning passes until a full pass makes no change
        do
        {
            endOfAlgorithm = true;

            // Step 1: mark pixels using the first neighbor-index pattern
            Process(startX, startY, stopX, stopY,
                dst0 + (rect.Top + 1) * dstStride + rect.Left + 1,
                dstStride, dstOffset, del0,
                new[] { 1, 3, 5, 3, 5, 7 }, ref endOfAlgorithm);

            // Deletion of pixels marked in step 1
            delete(startX, startY, stopX, stopY,
                dst0 + (rect.Top + 1) * dstStride + rect.Left + 1,
                dstOffset, del0);

            // Step 2: mark pixels using the second neighbor-index pattern
            Process(startX, startY, stopX, stopY,
                dst0 + (rect.Top + 1) * dstStride + rect.Left + 1,
                dstStride, dstOffset, del0,
                new[] { 1, 3, 7, 1, 5, 7 }, ref endOfAlgorithm);

            // Deletion of pixels marked in step 2
            delete(startX, startY, stopX, stopY,
                dst0 + (rect.Top + 1) * dstStride + rect.Left + 1,
                dstOffset, del0);
        } while (!endOfAlgorithm);
    }
    finally
    {
        // safe even if AllocHGlobal threw: FreeHGlobal(IntPtr.Zero) is a no-op
        Marshal.FreeHGlobal(delPtr);
    }

    #region Set colors
    // align pointer to the first pixel to process
    dst0 = dst0 + (rect.Top + 1) * dstStride + rect.Left + 1;

    // for each line
    for (var y = startY; y < stopY; y++)
    {
        // for each pixel: map black (0) to the background color, anything else to foreground
        for (var x = startX; x < stopX; x++, dst0++)
        {
            *dst0 = (*dst0) == byte.MinValue ? bg : fg;
        }
        dst0 += dstOffset;
    }
    #endregion
}
/// <summary>
/// Process new video frame.
/// </summary>
///
/// <param name="videoFrame">Video frame to process (detect motion in).</param>
///
/// <remarks><para>Processes new frame from video source and detects motion in it.</para>
///
/// <para>Check <see cref="MotionLevel"/> property to get information about amount of motion
/// (changes) in the processed frame.</para>
/// </remarks>
///
public unsafe void ProcessFrame(UnmanagedImage videoFrame)
{
    lock ( sync )
    {
        // first frame: allocate buffers and remember it as the initial background
        if (backgroundFrame == null)
        {
            lastTimeMeasurment = DateTime.Now;

            // save image dimension
            width = videoFrame.Width;
            height = videoFrame.Height;

            // allocate memory for background and motion frames
            backgroundFrame = UnmanagedImage.Create(width, height, videoFrame.PixelFormat);
            motionFrame = UnmanagedImage.Create(width, height, PixelFormat.Format8bppIndexed);

            frameSize = videoFrame.Stride * height;
            motionSize = motionFrame.Stride * motionFrame.Height;

            // temporary buffer used by the noise suppression filters
            if (suppressNoise)
            {
                tempFrame = UnmanagedImage.Create(width, height, PixelFormat.Format8bppIndexed);
            }

            // set the backgroundframe
            videoFrame.Copy(backgroundFrame);

            return;
        }

        // ignore frames whose dimensions do not match the background frame
        if ((videoFrame.Width != width) || (videoFrame.Height != height))
        {
            return;
        }

        // pointers to background and current frames
        byte *backFrame;
        byte *currFrame;
        int diff;

        // update background frame
        if (millisecondsPerBackgroundUpdate == 0)
        {
            // update background frame using frame counter as a base
            if (++framesCounter == framesPerBackgroundUpdate)
            {
                framesCounter = 0;

                backFrame = (byte *)backgroundFrame.ImageData.ToPointer( );
                currFrame = (byte *)videoFrame.ImageData.ToPointer( );

                // move each background byte one step toward the current frame
                for (int i = 0; i < frameSize; i++, backFrame++, currFrame++)
                {
                    diff = *currFrame - *backFrame;
                    if (diff > 0)
                    {
                        (*backFrame)++;
                    }
                    else if (diff < 0)
                    {
                        (*backFrame)--;
                    }
                }
            }
        }
        else
        {
            // update background frame using timer as a base

            // get current time and calculate difference
            DateTime currentTime = DateTime.Now;
            TimeSpan timeDff = currentTime - lastTimeMeasurment;
            // save current time as the last measurement
            lastTimeMeasurment = currentTime;

            int millisonds = (int)timeDff.TotalMilliseconds + millisecondsLeftUnprocessed;

            // save remainder so it could be taken into account in the future
            millisecondsLeftUnprocessed = millisonds % millisecondsPerBackgroundUpdate;
            // get amount for background update
            int updateAmount = (int)(millisonds / millisecondsPerBackgroundUpdate);

            backFrame = (byte *)backgroundFrame.ImageData.ToPointer( );
            currFrame = (byte *)videoFrame.ImageData.ToPointer( );

            // move each background byte toward the current frame by at most updateAmount
            for (int i = 0; i < frameSize; i++, backFrame++, currFrame++)
            {
                diff = *currFrame - *backFrame;
                if (diff > 0)
                {
                    (*backFrame) += (byte)((diff < updateAmount) ? diff : updateAmount);
                }
                else if (diff < 0)
                {
                    // NOTE(review): this adds the negative 'diff' cast to byte, relying on
                    // byte wrap-around to effectively subtract — verify against the positive
                    // branch, which adds the clamped magnitude directly
                    (*backFrame) += (byte)((-diff < updateAmount) ? diff : -updateAmount);
                }
            }
        }

        backFrame = (byte *)backgroundFrame.ImageData.ToPointer( );
        currFrame = (byte *)videoFrame.ImageData.ToPointer( );
        byte *motion = (byte *)motionFrame.ImageData.ToPointer( );
        byte *currFrameLocal;
        byte *backFrameLocal;
        byte *motionLocal;

        int bytesPerPixel = Tools.BytesPerPixel(videoFrame.PixelFormat);

        // 1 - get difference between frames (accumulated on every channel)
        // 2 - threshold the difference
        for (int i = 0; i < height; i++)
        {
            // per-row cursors so the outer pointers can advance by stride
            currFrameLocal = currFrame;
            backFrameLocal = backFrame;
            motionLocal = motion;

            for (int j = 0; j < width; j++)
            {
                diff = 0;
                for (int nbBytes = 0; nbBytes < bytesPerPixel; nbBytes++)
                {
                    // accumulate absolute per-channel difference
                    diff += Math.Abs((int)*currFrameLocal - (int)*backFrameLocal);
                    currFrameLocal++;
                    backFrameLocal++;
                }
                // average over channels before thresholding
                diff /= bytesPerPixel;

                // motion pixel set to 255 when average difference reaches threshold
                *motionLocal = (diff >= differenceThreshold) ? (byte)255 : (byte)0;
                motionLocal++;
            }

            // advance each image by its own stride
            currFrame += videoFrame.Stride;
            backFrame += backgroundFrame.Stride;
            motion += motionFrame.Stride;
        }

        if (suppressNoise)
        {
            // suppress noise and calculate motion amount
            AForge.SystemTools.CopyUnmanagedMemory(tempFrame.ImageData, motionFrame.ImageData, motionSize);
            erosionFilter.Apply(tempFrame, motionFrame);

            if (keepObjectEdges)
            {
                // restore object edges shrunk by erosion using dilatation
                AForge.SystemTools.CopyUnmanagedMemory(tempFrame.ImageData, motionFrame.ImageData, motionSize);
                dilatationFilter.Apply(tempFrame, motionFrame);
            }
        }

        // calculate amount of motion pixels
        // (each motion byte is 0 or 255, so the low bit counts set pixels)
        pixelsChanged = 0;
        motion = (byte *)motionFrame.ImageData.ToPointer( );

        for (int i = 0; i < motionSize; i++, motion++)
        {
            pixelsChanged += (*motion & 1);
        }
    }
}
/// <summary>
///   Process the filter on the specified image.
/// </summary>
///
/// <param name="sourceData">Source image data.</param>
/// <param name="destinationData">Destination image data.</param>
///
protected override void ProcessFilter(UnmanagedImage sourceData, UnmanagedImage destinationData)
{
    // promote 8bpp grayscale input to RGB so colored markers can be drawn
    if (sourceData.PixelFormat == PixelFormat.Format8bppIndexed)
    {
        sourceData = toRGB.Apply(sourceData);
    }

    // copy image contents into the destination before drawing on it
    sourceData.Copy(destinationData);

    Bitmap managedImage = destinationData.ToManagedImage(makeCopy: false);

    using (Graphics g = Graphics.FromImage(managedImage))
    using (Pen positive = new Pen(Color.Red))
    using (Pen negative = new Pen(Color.Blue))
    using (Pen line = new Pen(Color.FromArgb(0, 255, 0)))
    {
        // mark every detected feature point
        foreach (SpeededUpRobustFeaturePoint p in points)
        {
            // marker size is the feature's scale multiplied by the configured factor
            int diameter = (int)(scale * p.Scale);
            int radius = (int)(diameter / 2f);

            Point center = new Point((int)p.X, (int)p.Y);

            // orientation vector endpoint, relative to the center
            Point tip = new Point(
                (int)(radius * System.Math.Cos(p.Orientation)),
                (int)(radius * System.Math.Sin(p.Orientation)));

            // NOTE(review): the pen named "negative" (blue) is used for positive
            // Laplacian values — confirm the naming/color mapping is intended
            Pen circlePen = (p.Laplacian > 0 ? negative : positive);

            g.DrawEllipse(circlePen, center.X - radius, center.Y - radius, diameter, diameter);
            g.DrawLine(line, center, new Point(center.X + tip.X, center.Y + tip.Y));
        }
    }
}
/// <summary>
/// Process new video frame.
/// </summary>
///
/// <param name="videoFrame">Video frame to process (detect motion in).</param>
///
/// <remarks><para>Processes new frame from video source and detects motion in it.</para>
///
/// <para>Check <see cref="MotionLevel"/> property to get information about amount of motion
/// (changes) in the processed frame.</para>
/// </remarks>
///
public unsafe void ProcessFrame(UnmanagedImage videoFrame)
{
    lock ( sync )
    {
        // first frame: allocate the background buffer and remember the frame
        if (backgroundFrame == null)
        {
            // save image dimension
            width = videoFrame.Width;
            height = videoFrame.Height;

            // allocate memory for background frame
            backgroundFrame = UnmanagedImage.Create(width, height, videoFrame.PixelFormat);
            frameSize = backgroundFrame.Stride * height;

            // keep a copy of the frame as the static background
            // (original comment said "convert to grayscale", but this is a plain copy)
            videoFrame.Copy(backgroundFrame);

            return;
        }

        // ignore frames whose dimensions do not match the background frame
        if ((videoFrame.Width != width) || (videoFrame.Height != height))
        {
            return;
        }

        // lazily allocate the motion frame (and optional temporary buffer)
        if (motionFrame == null)
        {
            motionFrame = UnmanagedImage.Create(width, height, PixelFormat.Format8bppIndexed);
            motionSize = motionFrame.Stride * height;

            // temporary buffer used by the noise suppression filters
            if (suppressNoise)
            {
                tempFrame = UnmanagedImage.Create(width, height, PixelFormat.Format8bppIndexed);
            }
        }

        // pointers to background and current frames
        byte *backFrame;
        byte *currFrame;
        int diff;

        backFrame = (byte *)backgroundFrame.ImageData.ToPointer( );
        currFrame = (byte *)videoFrame.ImageData.ToPointer( );
        byte *motion = (byte *)motionFrame.ImageData.ToPointer( );
        byte *currFrameLocal;
        byte *backFrameLocal;
        byte *motionLocal;

        int bytesPerPixel = Tools.BytesPerPixel(videoFrame.PixelFormat);

        // 1 - get difference between frames
        // 2 - threshold the difference (accumulated over every channels)
        for (int i = 0; i < height; i++)
        {
            // per-row cursors so the outer pointers can advance by stride
            currFrameLocal = currFrame;
            backFrameLocal = backFrame;
            motionLocal = motion;

            for (int j = 0; j < width; j++)
            {
                diff = 0;
                for (int nbBytes = 0; nbBytes < bytesPerPixel; nbBytes++)
                {
                    // accumulate absolute per-channel difference
                    diff += Math.Abs((int)*currFrameLocal - (int)*backFrameLocal);
                    currFrameLocal++;
                    backFrameLocal++;
                }
                // average over channels before thresholding
                diff /= bytesPerPixel;

                // threshold: motion pixel set to 255 when average difference reaches threshold
                *motionLocal = (diff >= differenceThreshold) ? (byte)255 : (byte)0;
                motionLocal++;
            }

            // advance each image by its own stride
            currFrame += videoFrame.Stride;
            backFrame += backgroundFrame.Stride;
            motion += motionFrame.Stride;
        }

        if (suppressNoise)
        {
            // suppress noise and calculate motion amount
            AForge.SystemTools.CopyUnmanagedMemory(tempFrame.ImageData, motionFrame.ImageData, motionSize);
            erosionFilter.Apply(tempFrame, motionFrame);

            if (keepObjectEdges)
            {
                // restore object edges shrunk by erosion using dilatation
                AForge.SystemTools.CopyUnmanagedMemory(tempFrame.ImageData, motionFrame.ImageData, motionSize);
                dilatationFilter.Apply(tempFrame, motionFrame);
            }
        }

        // calculate amount of motion pixels
        // (each motion byte is 0 or 255, so the low bit counts set pixels)
        pixelsChanged = 0;
        motion = (byte *)motionFrame.ImageData.ToPointer( );

        for (int i = 0; i < motionSize; i++, motion++)
        {
            pixelsChanged += (*motion & 1);
        }
    }
}