public unsafe void ProcessFrame(UnmanagedImage videoFrame)
{
    lock (sync)
    {
        // on the first frame, save the image dimensions and allocate buffers
        if (previousFrame == null)
        {
            width = videoFrame.Width;
            height = videoFrame.Height;

            previousFrame = UnmanagedImage.Create(width, height, PixelFormat.Format8bppIndexed);
            motionFrame = UnmanagedImage.Create(width, height, PixelFormat.Format8bppIndexed);
            frameSize = motionFrame.Stride * height;

            // temporary buffer used for noise suppression
            if (suppressNoise)
            {
                tempFrame = UnmanagedImage.Create(width, height, PixelFormat.Format8bppIndexed);
            }

            Tools.ConvertToGrayscale(videoFrame, previousFrame);
            return;
        }

        // ignore frames whose dimensions differ from the first frame
        if ((videoFrame.Width != width) || (videoFrame.Height != height))
        {
            return;
        }

        // convert current frame to grayscale
        Tools.ConvertToGrayscale(videoFrame, motionFrame);

        byte* prevFrame = (byte*)previousFrame.ImageData.ToPointer();
        byte* currFrame = (byte*)motionFrame.ImageData.ToPointer();
        int diff;

        // 1 - get difference between frames
        // 2 - copy current frame to previous frame
        // 3 - threshold the difference
        for (int i = 0; i < frameSize; i++, prevFrame++, currFrame++)
        {
            diff = *currFrame - *prevFrame;
            *prevFrame = *currFrame;
            *currFrame = ((diff >= differenceThreshold) || (diff <= differenceThresholdNeg)) ? (byte)255 : (byte)0;
        }

        if (suppressNoise)
        {
            // erode the motion frame to suppress standalone noisy pixels
            SystemTools.CopyUnmanagedMemory(tempFrame.ImageData, motionFrame.ImageData, frameSize);
            erosionFilter.Apply(tempFrame, motionFrame);
        }

        // count motion pixels (each is either 0 or 255, so the lowest bit is enough)
        pixelsChanged = 0;
        byte* motion = (byte*)motionFrame.ImageData.ToPointer();
        for (int i = 0; i < frameSize; i++, motion++)
        {
            pixelsChanged += (*motion & 1);
        }
    }
}
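// The method above follows the two-frames difference scheme: each frame is compared
// against the immediately preceding one, changed pixels are set to 255, and an optional
// erosion pass suppresses single-pixel noise. A minimal usage sketch, assuming the method
// belongs to AForge.NET's TwoFramesDifferenceDetector (whose fields match the ones used
// here); the frame file names are hypothetical.
using System;
using System.Drawing;
using AForge.Imaging;
using AForge.Vision.Motion;

class TwoFramesDemo
{
    static void Main()
    {
        var detector = new TwoFramesDifferenceDetector(true); // true = suppress noise

        foreach (string file in new[] { "frame0.png", "frame1.png" }) // hypothetical frames
        {
            using (var bitmap = (Bitmap)Image.FromFile(file))
            using (var frame = UnmanagedImage.FromManagedImage(bitmap))
            {
                detector.ProcessFrame(frame);
                // MotionLevel is the fraction of pixels flagged as changed (0..1);
                // the first call only primes previousFrame, so it reports 0.
                Console.WriteLine("{0}: motion level = {1:F4}", file, detector.MotionLevel);
            }
        }
    }
}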
/// <summary>
/// Process new video frame.
/// </summary>
///
/// <param name="videoFrame">Video frame to process (detect motion in).</param>
///
/// <remarks><para>Processes new frame from video source and detects motion in it.</para>
///
/// <para>Check <see cref="MotionLevel"/> property to get information about amount of motion
/// (changes) in the processed frame.</para>
/// </remarks>
///
public unsafe void ProcessFrame(UnmanagedImage videoFrame)
{
    lock (_sync)
    {
        // check background frame
        if (_backgroundFrame == null)
        {
            _lastTimeMeasurment = DateTime.Now;

            // save image dimensions
            _width = videoFrame.Width;
            _height = videoFrame.Height;

            // allocate memory for background and motion frames
            _backgroundFrame = UnmanagedImage.Create(_width, _height, PixelFormat.Format8bppIndexed);
            _motionFrame = UnmanagedImage.Create(_width, _height, PixelFormat.Format8bppIndexed);
            _frameSize = _motionFrame.Stride * _height;

            // temporary buffer
            if (_suppressNoise)
            {
                _tempFrame = UnmanagedImage.Create(_width, _height, PixelFormat.Format8bppIndexed);
            }

            // convert source frame to grayscale
            Tools.ConvertToGrayscale(videoFrame, _backgroundFrame);
            return;
        }

        // check image dimensions
        if ((videoFrame.Width != _width) || (videoFrame.Height != _height))
        {
            return;
        }

        // convert current image to grayscale
        Tools.ConvertToGrayscale(videoFrame, _motionFrame);

        // pointers to background and current frames
        byte* backFrame;
        byte* currFrame;
        int diff;

        if (_millisecondsPerBackgroundUpdate == 0)
        {
            // update background frame using a frame counter as the base
            if (++_framesCounter == _framesPerBackgroundUpdate)
            {
                _framesCounter = 0;

                backFrame = (byte*)_backgroundFrame.ImageData.ToPointer();
                currFrame = (byte*)_motionFrame.ImageData.ToPointer();

                // move each background pixel one gray level toward the current frame
                for (int i = 0; i < _frameSize; i++, backFrame++, currFrame++)
                {
                    diff = *currFrame - *backFrame;
                    if (diff > 0) { (*backFrame)++; }
                    else if (diff < 0) { (*backFrame)--; }
                }
            }
        }
        else
        {
            // update background frame using a timer as the base

            // get current time and calculate difference
            DateTime currentTime = DateTime.Now;
            TimeSpan timeDiff = currentTime - _lastTimeMeasurment;
            // save current time as the last measurement
            _lastTimeMeasurment = currentTime;

            int milliseconds = (int)timeDiff.TotalMilliseconds + _millisecondsLeftUnprocessed;
            // save the remainder so it can be taken into account next time
            _millisecondsLeftUnprocessed = milliseconds % _millisecondsPerBackgroundUpdate;
            // get the amount for the background update
            int updateAmount = milliseconds / _millisecondsPerBackgroundUpdate;

            backFrame = (byte*)_backgroundFrame.ImageData.ToPointer();
            currFrame = (byte*)_motionFrame.ImageData.ToPointer();

            // move each background pixel up to updateAmount gray levels toward the current frame
            for (int i = 0; i < _frameSize; i++, backFrame++, currFrame++)
            {
                diff = *currFrame - *backFrame;
                if (diff > 0)
                {
                    (*backFrame) += (byte)((diff < updateAmount) ? diff : updateAmount);
                }
                else if (diff < 0)
                {
                    (*backFrame) += (byte)((-diff < updateAmount) ? diff : -updateAmount);
                }
            }
        }

        backFrame = (byte*)_backgroundFrame.ImageData.ToPointer();
        currFrame = (byte*)_motionFrame.ImageData.ToPointer();

        // 1 - get difference between frames
        // 2 - threshold the difference
        for (int i = 0; i < _frameSize; i++, backFrame++, currFrame++)
        {
            // difference
            diff = *currFrame - *backFrame;
            // threshold
            *currFrame = ((diff >= _differenceThreshold) || (diff <= _differenceThresholdNeg)) ? (byte)255 : (byte)0;
        }

        if (_suppressNoise)
        {
            // suppress noise and calculate motion amount
            AForge.SystemTools.CopyUnmanagedMemory(_tempFrame.ImageData, _motionFrame.ImageData, _frameSize);
            _erosionFilter.Apply(_tempFrame, _motionFrame);

            if (_keepObjectEdges)
            {
                AForge.SystemTools.CopyUnmanagedMemory(_tempFrame.ImageData, _motionFrame.ImageData, _frameSize);
                _dilatationFilter.Apply(_tempFrame, _motionFrame);
            }
        }

        // calculate amount of motion pixels
        _pixelsChanged = 0;
        byte* motion = (byte*)_motionFrame.ImageData.ToPointer();
        for (int i = 0; i < _frameSize; i++, motion++)
        {
            _pixelsChanged += (*motion & 1);
        }
    }
}
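// The timer-based branch above adapts the background by up to updateAmount gray levels
// per pixel, where updateAmount counts how many whole update periods elapsed since the
// last frame; the leftover milliseconds are carried forward. A worked sketch of that
// bookkeeping, with hypothetical numbers (10 ms per update, 37 ms elapsed, 5 ms carried):
static int BackgroundUpdateAmount(int elapsedMs, ref int leftoverMs, int msPerUpdate)
{
    int total = elapsedMs + leftoverMs; // 37 + 5 = 42 ms to account for
    leftoverMs = total % msPerUpdate;   // 42 % 10 = 2 ms carried to the next frame
    return total / msPerUpdate;         // 42 / 10 = 4 gray levels of adaptation
}
// Each background pixel then moves toward the current pixel by min(|diff|, updateAmount),
// so gradual scene changes are absorbed without instantly erasing real motion.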
/// <summary>
/// Process new video frame.
/// </summary>
///
/// <param name="videoFrame">Video frame to process (detect motion in).</param>
///
/// <remarks><para>Processes new frame from video source and detects motion in it.</para>
///
/// <para>Check <see cref="MotionLevel"/> property to get information about amount of motion
/// (changes) in the processed frame.</para>
/// </remarks>
///
public void ProcessFrame(UnmanagedImage videoFrame)
{
    lock (sync)
    {
        // check previous frame
        if (previousFrame == null)
        {
            // save image dimensions
            width = videoFrame.Width;
            height = videoFrame.Height;

            // allocate memory for previous and current frames
            previousFrame = UnmanagedImage.Create(width, height, PixelFormat.Format8bppIndexed);
            motionFrame = UnmanagedImage.Create(width, height, PixelFormat.Format8bppIndexed);
            frameSize = motionFrame.Stride * height;

            // temporary buffer
            if (suppressNoise)
            {
                tempFrame = UnmanagedImage.Create(width, height, PixelFormat.Format8bppIndexed);
            }

            // convert source frame to grayscale
            Accord.Vision.Tools.ConvertToGrayscale(videoFrame, previousFrame);
            return;
        }

        // check image dimensions
        if ((videoFrame.Width != width) || (videoFrame.Height != height))
        {
            return;
        }

        // convert current image to grayscale
        Accord.Vision.Tools.ConvertToGrayscale(videoFrame, motionFrame);

        unsafe
        {
            // pointers to previous and current frames
            byte* prevFrame = (byte*)previousFrame.ImageData.ToPointer();
            byte* currFrame = (byte*)motionFrame.ImageData.ToPointer();
            int diff;

            // 1 - get difference between frames
            // 2 - copy current frame to previous frame
            // 3 - threshold the difference
            for (int i = 0; i < frameSize; i++, prevFrame++, currFrame++)
            {
                diff = *currFrame - *prevFrame;
                *prevFrame = *currFrame;
                *currFrame = ((diff >= differenceThreshold) || (diff <= differenceThresholdNeg)) ? (byte)255 : (byte)0;
            }

            if (suppressNoise)
            {
                // suppress noise and calculate motion amount
                Accord.SystemTools.CopyUnmanagedMemory(tempFrame.ImageData, motionFrame.ImageData, frameSize);
                erosionFilter.Apply(tempFrame, motionFrame);
            }

            // calculate amount of motion pixels
            pixelsChanged = 0;
            byte* motion = (byte*)motionFrame.ImageData.ToPointer();
            for (int i = 0; i < frameSize; i++, motion++)
            {
                pixelsChanged += (*motion & 1);
            }
        }
    }
}
/// <summary>
/// Process new video frame.
/// </summary>
///
/// <param name="videoFrame">Video frame to process (detect motion in).</param>
///
/// <remarks><para>Processes new frame from video source and detects motion in it.</para>
///
/// <para>Check <see cref="MotionLevel"/> property to get information about amount of motion
/// (changes) in the processed frame.</para>
/// </remarks>
///
public unsafe void ProcessFrame(UnmanagedImage videoFrame)
{
    lock (_sync)
    {
        // check previous frame
        if (_previousFrame == null)
        {
            // save image dimensions
            _width = videoFrame.Width;
            _height = videoFrame.Height;

            // allocate memory for previous and motion frames
            _previousFrame = UnmanagedImage.Create(_width, _height, videoFrame.PixelFormat);
            _motionFrame = UnmanagedImage.Create(_width, _height, PixelFormat.Format8bppIndexed);
            _motionSize = _motionFrame.Stride * _height;

            // temporary buffer
            if (_suppressNoise)
            {
                _tempFrame = UnmanagedImage.Create(_width, _height, PixelFormat.Format8bppIndexed);
            }

            // copy source frame
            videoFrame.Copy(_previousFrame);
            return;
        }

        // check image dimensions
        if ((videoFrame.Width != _width) || (videoFrame.Height != _height))
        {
            return;
        }

        // pointers to previous, current and motion frames
        byte* prevFrame = (byte*)_previousFrame.ImageData.ToPointer();
        byte* currFrame = (byte*)videoFrame.ImageData.ToPointer();
        byte* motion = (byte*)_motionFrame.ImageData.ToPointer();
        int bytesPerPixel = Tools.BytesPerPixel(videoFrame.PixelFormat);

        // 1 - get difference between frames (accumulated over all channels)
        // 2 - threshold the difference
        // 3 - copy current frame to previous frame
        for (int i = 0; i < _height; i++)
        {
            var currFrameLocal = currFrame;
            var prevFrameLocal = prevFrame;
            var motionLocal = motion;

            for (int j = 0; j < _width; j++)
            {
                var diff = 0;
                for (int nbBytes = 0; nbBytes < bytesPerPixel; nbBytes++)
                {
                    // difference
                    diff += Math.Abs(*currFrameLocal - *prevFrameLocal);
                    // copy current frame to previous
                    *prevFrameLocal = *currFrameLocal;
                    currFrameLocal++;
                    prevFrameLocal++;
                }
                diff /= bytesPerPixel;

                // threshold
                *motionLocal = (diff >= _differenceThreshold) ? (byte)255 : (byte)0;
                motionLocal++;
            }

            currFrame += videoFrame.Stride;
            prevFrame += _previousFrame.Stride;
            motion += _motionFrame.Stride;
        }

        if (_suppressNoise)
        {
            // suppress noise and calculate motion amount
            Accord.SystemTools.CopyUnmanagedMemory(_tempFrame.ImageData, _motionFrame.ImageData, _motionSize);
            _erosionFilter.Apply(_tempFrame, _motionFrame);
        }

        // calculate amount of motion pixels
        _pixelsChanged = 0;
        motion = (byte*)_motionFrame.ImageData.ToPointer();
        for (int i = 0; i < _motionSize; i++, motion++)
        {
            _pixelsChanged += (*motion & 1);
        }
    }
}
/// <summary>
/// Process new video frame.
/// </summary>
///
/// <param name="videoFrame">Video frame to process (detect motion in).</param>
///
/// <remarks><para>Processes new frame from video source and detects motion in it.</para>
///
/// <para>Check <see cref="MotionLevel"/> property to get information about amount of motion
/// (changes) in the processed frame.</para>
/// </remarks>
///
public unsafe void ProcessFrame(UnmanagedImage videoFrame)
{
    lock (sync)
    {
        // check background frame
        if (backgroundFrame == null)
        {
            // save image dimensions
            width = videoFrame.Width;
            height = videoFrame.Height;

            // allocate memory for background frame
            backgroundFrame = UnmanagedImage.Create(width, height, PixelFormat.Format8bppIndexed);
            frameSize = backgroundFrame.Stride * height;

            // convert source frame to grayscale
            Tools.ConvertToGrayscale(videoFrame, backgroundFrame);
            return;
        }

        // check image dimensions
        if ((videoFrame.Width != width) || (videoFrame.Height != height))
        {
            return;
        }

        // check motion frame
        if (motionFrame == null)
        {
            motionFrame = UnmanagedImage.Create(width, height, PixelFormat.Format8bppIndexed);

            // temporary buffer
            if (suppressNoise)
            {
                tempFrame = UnmanagedImage.Create(width, height, PixelFormat.Format8bppIndexed);
            }
        }

        // convert current image to grayscale
        Tools.ConvertToGrayscale(videoFrame, motionFrame);

        // pointers to background and current frames
        byte* backFrame = (byte*)backgroundFrame.ImageData.ToPointer();
        byte* currFrame = (byte*)motionFrame.ImageData.ToPointer();
        int diff;

        // 1 - get difference between frames
        // 2 - threshold the difference
        for (int i = 0; i < frameSize; i++, backFrame++, currFrame++)
        {
            // difference
            diff = *currFrame - *backFrame;
            // threshold
            *currFrame = ((diff >= differenceThreshold) || (diff <= differenceThresholdNeg)) ? (byte)255 : (byte)0;
        }

        if (suppressNoise)
        {
            // suppress noise and calculate motion amount
            AForge.SystemTools.CopyUnmanagedMemory(tempFrame.ImageData, motionFrame.ImageData, frameSize);
            erosionFilter.Apply(tempFrame, motionFrame);

            if (keepObjectEdges)
            {
                AForge.SystemTools.CopyUnmanagedMemory(tempFrame.ImageData, motionFrame.ImageData, frameSize);
                dilatationFilter.Apply(tempFrame, motionFrame);
            }
        }

        // calculate amount of motion pixels
        pixelsChanged = 0;
        byte* motion = (byte*)motionFrame.ImageData.ToPointer();
        for (int i = 0; i < frameSize; i++, motion++)
        {
            pixelsChanged += (*motion & 1);
        }
    }
}
/// <summary>
/// Process new video frame.
/// </summary>
///
/// <param name="videoFrame">Video frame to process (detect motion in).</param>
///
/// <remarks><para>Processes new frame from video source and detects motion in it.</para>
///
/// <para>Check <see cref="MotionLevel"/> property to get information about amount of motion
/// (changes) in the processed frame.</para>
/// </remarks>
///
public unsafe void ProcessFrame(UnmanagedImage videoFrame)
{
    lock (sync)
    {
        // check background frame
        if (backgroundFrame == null)
        {
            lastTimeMeasurment = DateTime.Now;

            // save image dimensions
            width = videoFrame.Width;
            height = videoFrame.Height;

            // allocate memory for background and motion frames
            backgroundFrame = UnmanagedImage.Create(width, height, videoFrame.PixelFormat);
            motionFrame = UnmanagedImage.Create(width, height, PixelFormat.Format8bppIndexed);
            frameSize = videoFrame.Stride * height;
            motionSize = motionFrame.Stride * motionFrame.Height;

            // temporary buffer
            if (suppressNoise)
            {
                tempFrame = UnmanagedImage.Create(width, height, PixelFormat.Format8bppIndexed);
            }

            // set the background frame
            videoFrame.Copy(backgroundFrame);
            return;
        }

        // check image dimensions
        if ((videoFrame.Width != width) || (videoFrame.Height != height))
        {
            return;
        }

        // pointers to background and current frames
        byte* backFrame;
        byte* currFrame;
        int diff;

        if (millisecondsPerBackgroundUpdate == 0)
        {
            // update background frame using a frame counter as the base
            if (++framesCounter == framesPerBackgroundUpdate)
            {
                framesCounter = 0;

                backFrame = (byte*)backgroundFrame.ImageData.ToPointer();
                currFrame = (byte*)videoFrame.ImageData.ToPointer();

                // move each background byte one level toward the current frame
                for (int i = 0; i < frameSize; i++, backFrame++, currFrame++)
                {
                    diff = *currFrame - *backFrame;
                    if (diff > 0) { (*backFrame)++; }
                    else if (diff < 0) { (*backFrame)--; }
                }
            }
        }
        else
        {
            // update background frame using a timer as the base

            // get current time and calculate difference
            DateTime currentTime = DateTime.Now;
            TimeSpan timeDiff = currentTime - lastTimeMeasurment;
            // save current time as the last measurement
            lastTimeMeasurment = currentTime;

            int milliseconds = (int)timeDiff.TotalMilliseconds + millisecondsLeftUnprocessed;
            // save the remainder so it can be taken into account next time
            millisecondsLeftUnprocessed = milliseconds % millisecondsPerBackgroundUpdate;
            // get the amount for the background update
            int updateAmount = milliseconds / millisecondsPerBackgroundUpdate;

            backFrame = (byte*)backgroundFrame.ImageData.ToPointer();
            currFrame = (byte*)videoFrame.ImageData.ToPointer();

            // move each background byte up to updateAmount levels toward the current frame
            for (int i = 0; i < frameSize; i++, backFrame++, currFrame++)
            {
                diff = *currFrame - *backFrame;
                if (diff > 0)
                {
                    (*backFrame) += (byte)((diff < updateAmount) ? diff : updateAmount);
                }
                else if (diff < 0)
                {
                    (*backFrame) += (byte)((-diff < updateAmount) ? diff : -updateAmount);
                }
            }
        }

        backFrame = (byte*)backgroundFrame.ImageData.ToPointer();
        currFrame = (byte*)videoFrame.ImageData.ToPointer();
        byte* motion = (byte*)motionFrame.ImageData.ToPointer();
        byte* currFrameLocal;
        byte* backFrameLocal;
        byte* motionLocal;
        int bytesPerPixel = Tools.BytesPerPixel(videoFrame.PixelFormat);

        // 1 - get difference between frames (accumulated over all channels)
        // 2 - threshold the difference
        for (int i = 0; i < height; i++)
        {
            currFrameLocal = currFrame;
            backFrameLocal = backFrame;
            motionLocal = motion;

            for (int j = 0; j < width; j++)
            {
                diff = 0;
                for (int nbBytes = 0; nbBytes < bytesPerPixel; nbBytes++)
                {
                    // difference
                    diff += Math.Abs(*currFrameLocal - *backFrameLocal);
                    currFrameLocal++;
                    backFrameLocal++;
                }
                diff /= bytesPerPixel;

                *motionLocal = (diff >= differenceThreshold) ? (byte)255 : (byte)0;
                motionLocal++;
            }

            currFrame += videoFrame.Stride;
            backFrame += backgroundFrame.Stride;
            motion += motionFrame.Stride;
        }

        if (suppressNoise)
        {
            // suppress noise and calculate motion amount
            AForge.SystemTools.CopyUnmanagedMemory(tempFrame.ImageData, motionFrame.ImageData, motionSize);
            erosionFilter.Apply(tempFrame, motionFrame);

            if (keepObjectEdges)
            {
                AForge.SystemTools.CopyUnmanagedMemory(tempFrame.ImageData, motionFrame.ImageData, motionSize);
                dilatationFilter.Apply(tempFrame, motionFrame);
            }
        }

        // calculate amount of motion pixels
        pixelsChanged = 0;
        motion = (byte*)motionFrame.ImageData.ToPointer();
        for (int i = 0; i < motionSize; i++, motion++)
        {
            pixelsChanged += (*motion & 1);
        }
    }
}
/// <summary>
/// Process new video frame.
/// </summary>
///
/// <param name="videoFrame">Video frame to process (detect motion in).</param>
///
/// <remarks><para>Processes new frame from video source and detects motion in it.</para>
///
/// <para>Check <see cref="MotionLevel"/> property to get information about amount of motion
/// (changes) in the processed frame.</para>
/// </remarks>
///
public unsafe void ProcessFrame(UnmanagedImage videoFrame)
{
    //lock ( _sync ) // synchronization disabled in this variant; the bare block keeps the original scope
    {
        // check previous frame
        if (_previousFrame == null)
        {
            // save image dimensions
            _width = videoFrame.Width;
            _height = videoFrame.Height;

            // allocate memory for previous and current frames
            _previousFrame = UnmanagedImage.Create(_width, _height, PixelFormat.Format8bppIndexed);
            _motionFrame = UnmanagedImage.Create(_width, _height, PixelFormat.Format8bppIndexed);
            _frameSize = _motionFrame.Stride * _height;

            // temporary buffer
            if (_suppressNoise)
            {
                _tempFrame = UnmanagedImage.Create(_width, _height, PixelFormat.Format8bppIndexed);
            }

            // convert source frame to grayscale
            Tools.ConvertToGrayscale(videoFrame, _previousFrame);
            return;
        }

        // check image dimensions
        if ((videoFrame.Width != _width) || (videoFrame.Height != _height))
        {
            return;
        }

        // convert current image to grayscale
        Tools.ConvertToGrayscale(videoFrame, _motionFrame);

        // pointers to previous and current frames
        byte* prevFrame = (byte*)_previousFrame.ImageData.ToPointer();
        byte* currFrame = (byte*)_motionFrame.ImageData.ToPointer();

        // 1 - get difference between frames
        // 2 - copy current frame to previous frame
        // 3 - threshold the difference
        for (int i = 0; i < _frameSize; i++, prevFrame++, currFrame++)
        {
            // difference
            var diff = *currFrame - *prevFrame;
            // copy current frame to previous
            *prevFrame = *currFrame;
            // threshold
            *currFrame = ((diff >= _differenceThreshold) || (diff <= _differenceThresholdNeg)) ? (byte)255 : (byte)0;
        }

        if (_suppressNoise)
        {
            // suppress noise and calculate motion amount
            AForge.SystemTools.CopyUnmanagedMemory(_tempFrame.ImageData, _motionFrame.ImageData, _frameSize);
            _erosionFilter.Apply(_tempFrame, _motionFrame);
        }

        // calculate amount of motion pixels
        _pixelsChanged = 0;
        byte* motion = (byte*)_motionFrame.ImageData.ToPointer();
        for (int i = 0; i < _frameSize; i++, motion++)
        {
            _pixelsChanged += (*motion & 1);
        }
    }
}
public unsafe void ProcessFrame(UnmanagedImage videoFrame)
{
    lock (sync)
    {
        // on the first frame, save dimensions, allocate buffers and prime the background
        if (backgroundFrame == null)
        {
            lastTimeMeasurment = DateTime.Now;
            width = videoFrame.Width;
            height = videoFrame.Height;

            backgroundFrame = UnmanagedImage.Create(width, height, PixelFormat.Format8bppIndexed);
            motionFrame = UnmanagedImage.Create(width, height, PixelFormat.Format8bppIndexed);
            frameSize = motionFrame.Stride * height;

            if (suppressNoise)
            {
                tempFrame = UnmanagedImage.Create(width, height, PixelFormat.Format8bppIndexed);
            }

            Tools.ConvertToGrayscale(videoFrame, backgroundFrame);
            return;
        }

        // ignore frames whose dimensions differ from the first frame
        if ((videoFrame.Width != width) || (videoFrame.Height != height))
        {
            return;
        }

        Tools.ConvertToGrayscale(videoFrame, motionFrame);

        byte* backFrame;
        byte* currFrame;
        int diff;

        if (millisecondsPerBackgroundUpdate == 0)
        {
            // update background frame using a frame counter as the base
            if (++framesCounter == framesPerBackgroundUpdate)
            {
                framesCounter = 0;

                backFrame = (byte*)backgroundFrame.ImageData.ToPointer();
                currFrame = (byte*)motionFrame.ImageData.ToPointer();

                // move each background pixel one gray level toward the current frame
                for (int i = 0; i < frameSize; i++, backFrame++, currFrame++)
                {
                    diff = *currFrame - *backFrame;
                    if (diff > 0) { (*backFrame)++; }
                    else if (diff < 0) { (*backFrame)--; }
                }
            }
        }
        else
        {
            // update background frame using a timer as the base
            DateTime currentTime = DateTime.Now;
            TimeSpan timeDiff = currentTime - lastTimeMeasurment;
            lastTimeMeasurment = currentTime;

            int milliseconds = (int)timeDiff.TotalMilliseconds + millisecondsLeftUnprocessed;
            // save the remainder so it can be taken into account next time
            millisecondsLeftUnprocessed = milliseconds % millisecondsPerBackgroundUpdate;
            int updateAmount = milliseconds / millisecondsPerBackgroundUpdate;

            backFrame = (byte*)backgroundFrame.ImageData.ToPointer();
            currFrame = (byte*)motionFrame.ImageData.ToPointer();

            // move each background pixel up to updateAmount gray levels toward the current frame
            for (int i = 0; i < frameSize; i++, backFrame++, currFrame++)
            {
                diff = *currFrame - *backFrame;
                if (diff > 0)
                {
                    (*backFrame) += (byte)((diff < updateAmount) ? diff : updateAmount);
                }
                else if (diff < 0)
                {
                    (*backFrame) += (byte)((-diff < updateAmount) ? diff : -updateAmount);
                }
            }
        }

        backFrame = (byte*)backgroundFrame.ImageData.ToPointer();
        currFrame = (byte*)motionFrame.ImageData.ToPointer();

        // difference the current frame against the background and threshold it
        for (int i = 0; i < frameSize; i++, backFrame++, currFrame++)
        {
            diff = *currFrame - *backFrame;
            *currFrame = ((diff >= differenceThreshold) || (diff <= differenceThresholdNeg)) ? (byte)255 : (byte)0;
        }

        if (suppressNoise)
        {
            // erode to suppress noise, then optionally dilate to restore object edges
            SystemTools.CopyUnmanagedMemory(tempFrame.ImageData, motionFrame.ImageData, frameSize);
            erosionFilter.Apply(tempFrame, motionFrame);

            if (keepObjectEdges)
            {
                SystemTools.CopyUnmanagedMemory(tempFrame.ImageData, motionFrame.ImageData, frameSize);
                dilatationFilter.Apply(tempFrame, motionFrame);
            }
        }

        // count motion pixels
        pixelsChanged = 0;
        byte* motion = (byte*)motionFrame.ImageData.ToPointer();
        for (int i = 0; i < frameSize; i++, motion++)
        {
            pixelsChanged += (*motion & 1);
        }
    }
}
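// The variant above maintains an adaptive background instead of the previous frame, so
// slow scene changes (lighting, parked objects) fade into the background while fast
// changes register as motion. A minimal sketch of driving it from a frame callback,
// assuming AForge.NET's SimpleBackgroundModelingDetector (whose fields match the ones
// used here); the MotionMonitor wrapper and the 2% trigger level are hypothetical.
using System;
using System.Drawing;
using AForge.Imaging;
using AForge.Vision.Motion;

class MotionMonitor
{
    // suppress noise, and dilate the eroded motion frame to keep object edges
    private readonly SimpleBackgroundModelingDetector detector =
        new SimpleBackgroundModelingDetector(true, true) { FramesPerBackgroundUpdate = 2 };

    // call this for every frame delivered by a video source
    public void OnNewFrame(Bitmap bitmap)
    {
        using (UnmanagedImage frame = UnmanagedImage.FromManagedImage(bitmap))
        {
            detector.ProcessFrame(frame);
            if (detector.MotionLevel > 0.02f) // more than 2% of pixels changed
            {
                Console.WriteLine("Motion detected");
            }
        }
    }
}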
static void Main(string[] args)
{
    try
    {
        if (args.Length == 0)
        {
            exitWithHelp(); // Exit! No param
        }

        // mandatory parameter
        string in_path = "";
        // optional parameters
        bool cropcenter = false;
        bool emulate = false;
        bool debug = false;
        // optional blob parameters
        bool blob = false;
        string bfilterw = "";
        double bfilterw_min = 1;
        double bfilterw_max = 1;
        string bfilterh = "";
        double bfilterh_min = 1;
        double bfilterh_max = 1;
        bool blob_noff = false;
        bool blob_noshape = false;
        bool blob_notrotate = false;
        string blob_zone = "";
        double blob_zonex = 0.5;
        double blob_zoney = 0.5;

        // parse parameters: each option is a flag or a "-parm value" pair
        for (int p = 0; p < args.Length; p++)
        {
            switch (args[p])
            {
                case "-debug": debug = true; break;
                case "-f": in_path = args[p + 1]; break;
                case "-cropcenter": cropcenter = true; break;
                case "-emulate": emulate = true; break;
                case "-blob": blob = true; break;
                case "-bfilterw": bfilterw = args[p + 1]; break;
                case "-bfilterh": bfilterh = args[p + 1]; break;
                case "-bnoff": blob_noff = true; break;
                case "-bzone": blob_zone = args[p + 1]; break;
                case "-bnoshape": blob_noshape = true; break;
                case "-bnotrotate": blob_notrotate = true; break;
                default:
                    if (args[p].StartsWith("-"))
                    {
                        exitNotValid(args[p]); // Exit! Invalid param
                    }
                    break;
            }
        }

        // check mandatory parameter
        if (in_path.Equals(""))
        {
            exitWithHelp();
        }

        // check the other parameters
        if (!bfilterw.Equals(""))
        {
            RegexOptions options = RegexOptions.IgnoreCase | RegexOptions.IgnorePatternWhitespace | RegexOptions.Singleline;
            Regex pattern = new Regex(@"((?:[0]\.)?\d+)\-((?:[0]\.)?\d+)", options);
            Match match = pattern.Match(bfilterw);
            if (match.Success && match.Groups.Count == 3)
            {
                // note: relies on a culture that uses ',' as the decimal separator
                bfilterw_min = Convert.ToDouble(match.Groups[1].Value.Replace('.', ','));
                bfilterw_max = Convert.ToDouble(match.Groups[2].Value.Replace('.', ','));
            }
            else
            {
                exitWithError("Invalid '-bfilterw' option.",
                    "Specify the minimum and maximum values in the following format:",
                    " -bfilterw minvalue-maxvalue",
                    " e.g. -bfilterw 0.30-0.40");
            }
        }
        if (!bfilterh.Equals(""))
        {
            RegexOptions options = RegexOptions.IgnoreCase | RegexOptions.IgnorePatternWhitespace | RegexOptions.Singleline;
            Regex pattern = new Regex(@"((?:[0]\.)?\d+)\-((?:[0]\.)?\d+)", options);
            Match match = pattern.Match(bfilterh);
            if (match.Success && match.Groups.Count == 3)
            {
                bfilterh_min = Convert.ToDouble(match.Groups[1].Value.Replace('.', ','));
                bfilterh_max = Convert.ToDouble(match.Groups[2].Value.Replace('.', ','));
            }
            else
            {
                exitWithError("Invalid '-bfilterh' option.",
                    "Specify the minimum and maximum values in the following format:",
                    " -bfilterh minvalue-maxvalue",
                    " e.g. -bfilterh 0.30-0.40");
            }
        }
        if (!blob_zone.Equals(""))
        {
            RegexOptions options = RegexOptions.IgnoreCase | RegexOptions.IgnorePatternWhitespace | RegexOptions.Singleline;
            Regex pattern = new Regex(@"((?:[0]\.)?\d+)\,((?:[0]\.)?\d+)", options);
            Match match = pattern.Match(blob_zone);
            if (match.Success && match.Groups.Count == 3)
            {
                blob_zonex = Convert.ToDouble(match.Groups[1].Value.Replace('.', ','));
                blob_zoney = Convert.ToDouble(match.Groups[2].Value.Replace('.', ','));
            }
            else
            {
                exitWithError("Invalid '-bzone' option.",
                    "Specify the coordinates of the point where the barcode should be searched.",
                    " -bzone x,y",
                    " e.g. -bzone 0.5,0.5");
            }
        }

        // check that the input file exists
        if (File.Exists(in_path))
        {
            in_path = Path.GetFullPath(in_path);
        }
        else
        {
            exitFileNotFound(in_path);
        }

        // START
        Stopwatch stopWatch = new Stopwatch();
        if (emulate) { stopWatch.Start(); }

        // convert to an image if the input is a PDF
        string tmp_path = "";
        bool tmp_file = false;
        if (Path.GetExtension(in_path).Equals(".pdf"))
        {
            if (debug) { Console.WriteLine("Converting pdf..."); }
            tmp_path = in_path + ".png";
            tmp_file = true;
            libImage.ConvertSingleImage(in_path, tmp_path, 300);
        }
        else
        {
            tmp_path = in_path;
        }

        // load the image into memory and delete the temporary file
        System.Drawing.Bitmap tmp_img;
        using (System.Drawing.Bitmap img_source = (Bitmap)Bitmap.FromFile(tmp_path))
        {
            tmp_img = new Bitmap(img_source);
        }
        if (tmp_file) { File.Delete(tmp_path); }

        // get page info
        int page_w = tmp_img.Width;
        int page_h = tmp_img.Height;
        if (debug) { Console.WriteLine("File dimension: w=" + page_w + " h=" + page_h); }

        // crop the central area
        if (cropcenter)
        {
            if (debug) { Console.WriteLine("Cropping central image..."); }
            int crop_x = Convert.ToInt32((double)tmp_img.Width * 0.3),
                crop_y = Convert.ToInt32((double)tmp_img.Height * 0.3),
                crop_width = Convert.ToInt32(((double)tmp_img.Width * 0.7) - crop_x),
                crop_height = Convert.ToInt32(((double)tmp_img.Height * 0.7) - crop_y);
            tmp_img = tmp_img.Clone(new Rectangle(crop_x, crop_y, crop_width, crop_height), PixelFormat.Format32bppArgb);
            page_w = tmp_img.Width;
            page_h = tmp_img.Height;
            if (debug) { Console.WriteLine("New file dimension: w=" + page_w + " h=" + page_h); }
        }
        else
        {
            tmp_img = AForge.Imaging.Image.Clone(tmp_img, PixelFormat.Format32bppArgb);
        }

        // blob analysis
        if (blob)
        {
            if (debug) { Console.WriteLine("Starting Blob Analysis..."); }

            // grayscale filter
            Grayscale filterg = new Grayscale(0.2125, 0.7154, 0.0721);
            tmp_img = filterg.Apply(tmp_img);
            Bitmap tmp_img_wrk = (Bitmap)tmp_img.Clone();

            // apply a 3x3 binary erosion six times
            BinaryErosion3x3 filter = new BinaryErosion3x3();
            for (int e = 0; e < 6; e++) { tmp_img_wrk = filter.Apply(tmp_img_wrk); }

            // binarization
            SISThreshold filterSIS = new SISThreshold();
            tmp_img_wrk = filterSIS.Apply(tmp_img_wrk);

            // inversion
            Invert filterI = new Invert();
            tmp_img_wrk = filterI.Apply(tmp_img_wrk);

            // blob analysis
            BlobCounterBase bc = new BlobCounter();
            bc.FilterBlobs = true;
            if (!bfilterw.Equals(""))
            {
                bc.MinWidth = Convert.ToInt32(page_w * bfilterw_min); // e.g. 0.15 of the page width
                bc.MaxWidth = Convert.ToInt32(page_w * bfilterw_max); // e.g. 0.30
            }
            if (!bfilterh.Equals(""))
            {
                bc.MinHeight = Convert.ToInt32(page_h * bfilterh_min); // e.g. 0.10 of the page height
                bc.MaxHeight = Convert.ToInt32(page_h * bfilterh_max); // e.g. 0.20
            }
            if (debug)
            {
                Console.WriteLine("Searching blob (Dimension filter: w=" + bc.MinWidth + "-" + (bc.MaxWidth.Equals(int.MaxValue) ? "max" : bc.MaxWidth.ToString()) + " h=" + bc.MinHeight + "-" + (bc.MaxHeight.Equals(int.MaxValue) ? "max" : bc.MaxHeight.ToString()) + ")");
            }
            bc.ObjectsOrder = ObjectsOrder.Size;
            bc.ProcessImage(tmp_img_wrk);
            Blob[] blobs = bc.GetObjectsInformation();
            if (debug) { Console.WriteLine("Blobs found: " + blobs.Length); }

            // examine blobs
            int i = 1;
            foreach (Blob b in blobs)
            {
                // exclude the container blob (the image itself)
                if (b.Rectangle.Width == page_w)
                {
                    if (debug) { Console.WriteLine("Blob " + i + ": skip! (is container)"); }
                    i++;
                    continue;
                }

                // check form factor
                if (!blob_noff)
                {
                    double formf = (Convert.ToDouble(b.Rectangle.Width) / Convert.ToDouble(b.Rectangle.Height)) * 100;
                    if (formf < 95)
                    {
                        // skip: form factor is not square
                        if (debug)
                        {
                            Console.WriteLine("Blob " + i + ": Check 1 - Form factor > 95 Failed! (form factor is not square " + formf + "<95) Blob Skipped!");
                            Console.WriteLine("You can disable this check with -bnoff parameter.");
                        }
                        i++;
                        continue;
                    }
                    if (debug) { Console.WriteLine("Blob " + i + ": Check 1 - Form factor > 95 " + formf + " Ok!"); }
                }
                else if (debug)
                {
                    Console.WriteLine("Blob " + i + ": Check 1 - Form factor > 95 skipped by option -bnoff ");
                }

                // check zone
                if (!blob_zone.Equals(""))
                {
                    Rectangle bZone = b.Rectangle;
                    bZone.Inflate(Convert.ToInt32(b.Rectangle.Width * 0.2), Convert.ToInt32(b.Rectangle.Height * 0.2));
                    if (!bZone.Contains(Convert.ToInt32(page_w * blob_zonex), Convert.ToInt32(page_h * blob_zoney)))
                    {
                        // skip: not in the requested zone
                        if (debug)
                        {
                            Console.WriteLine("Blob " + i + ": Check 2 - Zone of blob Failed! (Not in the zone requested! blob zone:" + b.Rectangle.ToString() + " and requested point is at x=" + Convert.ToInt32(page_w * blob_zonex) + ",y=" + Convert.ToInt32(page_h * blob_zoney) + " ) Blob Skipped!");
                        }
                        i++;
                        continue;
                    }
                    if (debug)
                    {
                        Console.WriteLine("Blob " + i + ": Check 2 - Zone of blob contains " + Convert.ToInt32(page_w * blob_zonex) + "," + Convert.ToInt32(page_h * blob_zoney) + "... Ok!");
                    }
                }

                // check shape
                List<IntPoint> edgePoints = bc.GetBlobsEdgePoints(b);
                List<IntPoint> corners;
                SimpleShapeChecker shapeChecker = new SimpleShapeChecker();
                if (shapeChecker.IsQuadrilateral(edgePoints, out corners))
                {
                    if (!blob_noshape)
                    {
                        PolygonSubType subType = shapeChecker.CheckPolygonSubType(corners);
                        if (!subType.Equals(PolygonSubType.Square))
                        {
                            // skip: not a square
                            if (debug)
                            {
                                Console.WriteLine("Blob " + i + ": Check 3 - Shape is Square Failed! (Shape is not Square! " + subType.ToString() + " detected!) Blob Skipped!");
                                Console.WriteLine("You can disable this check with -bnoshape parameter.");
                            }
                            i++;
                            continue;
                        }
                        else if (debug) { Console.WriteLine("Blob " + i + ": Check 3 - Shape is Square Ok!"); }
                    }
                    else if (debug) { Console.WriteLine("Blob " + i + ": Check 3 - Shape is Square skipped by option -bnoshape "); }
                }
                else
                {
                    // skip: not a quadrilateral
                    if (debug)
                    {
                        Console.WriteLine("Blob " + i + ": Check 3 - Shape is Square... Failed! (not a Quadrilateral! ConvexPolygon:" + shapeChecker.IsConvexPolygon(edgePoints, out corners) + " Triangle:" + shapeChecker.IsTriangle(edgePoints, out corners) + ") Blob Skipped!");
                    }
                    i++;
                    continue;
                }

                // calculate rotation angle; corners: 0 bottom left, 1 top left, 2 top right, 3 bottom right
                double dx = corners[2].X - corners[1].X;
                double dy = corners[1].Y - corners[2].Y;
                double ang = Math.Atan2(dx, dy) * (180 / Math.PI);
                if (ang > 90) { ang = ang - 90; } else { ang = 90 - ang; }

                // extract the blob
                Rectangle cropRect = b.Rectangle;
                cropRect.Inflate(Convert.ToInt32(b.Rectangle.Width * 0.1), Convert.ToInt32(b.Rectangle.Height * 0.1));
                Crop filter_blob = new Crop(cropRect);
                Bitmap tmp_img_blob = filter_blob.Apply(tmp_img);

                // rotate
                if (!blob_notrotate)
                {
                    RotateBilinear filterRotate = new RotateBilinear(ang, true);
                    tmp_img_blob = filterRotate.Apply(tmp_img_blob);

                    // remove the outer margin (black bands produced by the rotation)
                    Rectangle cropRectInterno = new Rectangle(0, 0, tmp_img_blob.Width, tmp_img_blob.Height);
                    cropRectInterno.Inflate(-Convert.ToInt32(b.Rectangle.Width * 0.05), -Convert.ToInt32(b.Rectangle.Height * 0.05));
                    Crop filterCropInterno = new Crop(cropRectInterno);
                    tmp_img_blob = filterCropInterno.Apply(tmp_img_blob);

                    if (debug) { Console.WriteLine("Blob " + i + ": Rotated and aligned! (angle:" + ang + ")"); }
                }
                else if (debug)
                {
                    Console.WriteLine("Blob " + i + ": Rotation skipped by option -bnotrotate (angle:" + ang + ")");
                }

                // apply filters
                var filter1 = new Median();
                filter1.ApplyInPlace(tmp_img_blob);
                var filter2 = new OtsuThreshold();
                filter2.ApplyInPlace(tmp_img_blob);

                // decode
                if (debug) { Console.WriteLine("Blob " + i + ": Extracted! Trying to decode..."); }
                BarcodeReader reader = new BarcodeReader { AutoRotate = true };
                Result result = reader.Decode(tmp_img_blob);

                // output results
                if (result != null)
                {
                    if (emulate)
                    {
                        stopWatch.Stop();
                        Console.WriteLine("Success in " + stopWatch.Elapsed);
                    }
                    else
                    {
                        Console.WriteLine(result.Text);
                    }
                    Environment.Exit(0);
                }
                else if (debug) { Console.WriteLine("Blob " + i + ": Decode failed! (Result null)"); }
            }
        }
        else
        {
            BarcodeReader reader = new BarcodeReader { AutoRotate = true };
            Result result = reader.Decode(tmp_img);

            // output results
            if (result != null)
            {
                if (emulate)
                {
                    stopWatch.Stop();
                    Console.WriteLine(stopWatch.Elapsed);
                }
                else
                {
                    Console.WriteLine(result.Text);
                    Environment.Exit(0);
                }
            }
            else if (debug) { Console.WriteLine("Decode failed! (Result null)"); }
        }

        // exit
        if (emulate && stopWatch.IsRunning)
        {
            stopWatch.Stop();
            Console.WriteLine("Failure in " + stopWatch.Elapsed);
        }
        Environment.Exit(0);
    }
    catch (Exception ex)
    {
        Console.WriteLine("Fatal Error: " + ex.Message + "\n" + ex.InnerException);
    }
}
/// <summary>
/// Process new video frame.
/// </summary>
///
/// <param name="videoFrame">Video frame to process (detect motion in).</param>
///
/// <remarks><para>Processes new frame from video source and detects motion in it.</para>
///
/// <para>Check <see cref="MotionLevel"/> property to get information about amount of motion
/// (changes) in the processed frame.</para>
/// </remarks>
///
public unsafe void ProcessFrame(UnmanagedImage videoFrame)
{
    lock (sync)
    {
        // check background frame
        if (backgroundFrame == null)
        {
            lastTimeMeasurment = DateTime.Now;

            // save image dimensions
            width = videoFrame.Width;
            height = videoFrame.Height;

            // allocate memory for background and motion frames
            backgroundFrame = UnmanagedImage.Create(width, height, PixelFormat.Format8bppIndexed);
            motionFrame = UnmanagedImage.Create(width, height, PixelFormat.Format8bppIndexed);
            frameSize = motionFrame.Stride * height;

            // temporary buffer
            if (suppressNoise)
            {
                tempFrame = UnmanagedImage.Create(width, height, PixelFormat.Format8bppIndexed);
            }

            // convert source frame to grayscale
            Tools.ConvertToGrayscale(videoFrame, backgroundFrame);
            return;
        }

        // check image dimensions
        if ((videoFrame.Width != width) || (videoFrame.Height != height))
        {
            return;
        }

        // convert current image to grayscale
        Tools.ConvertToGrayscale(videoFrame, motionFrame);

        // pointers to background and current frames
        byte* backFrame;
        byte* currFrame;
        int diff;

        if (millisecondsPerBackgroundUpdate == 0)
        {
            // update background frame using a frame counter as the base
            if (++framesCounter == framesPerBackgroundUpdate)
            {
                framesCounter = 0;

                backFrame = (byte*)backgroundFrame.ImageData.ToPointer();
                currFrame = (byte*)motionFrame.ImageData.ToPointer();

                // move each background pixel one gray level toward the current frame
                for (var i = 0; i < frameSize; i++, backFrame++, currFrame++)
                {
                    diff = *currFrame - *backFrame;
                    if (diff > 0) { (*backFrame)++; }
                    else if (diff < 0) { (*backFrame)--; }
                }
            }
        }
        else
        {
            // update background frame using a timer as the base

            // get current time and calculate difference
            var currentTime = DateTime.Now;
            var timeDiff = currentTime - lastTimeMeasurment;
            // save current time as the last measurement
            lastTimeMeasurment = currentTime;

            var milliseconds = (int)timeDiff.TotalMilliseconds + millisecondsLeftUnprocessed;
            // save the remainder so it can be taken into account next time
            millisecondsLeftUnprocessed = milliseconds % millisecondsPerBackgroundUpdate;
            // get the amount for the background update
            var updateAmount = milliseconds / millisecondsPerBackgroundUpdate;

            backFrame = (byte*)backgroundFrame.ImageData.ToPointer();
            currFrame = (byte*)motionFrame.ImageData.ToPointer();

            // move each background pixel up to updateAmount gray levels toward the current frame
            for (var i = 0; i < frameSize; i++, backFrame++, currFrame++)
            {
                diff = *currFrame - *backFrame;
                if (diff > 0)
                {
                    (*backFrame) += (byte)((diff < updateAmount) ? diff : updateAmount);
                }
                else if (diff < 0)
                {
                    (*backFrame) += (byte)((-diff < updateAmount) ? diff : -updateAmount);
                }
            }
        }

        backFrame = (byte*)backgroundFrame.ImageData.ToPointer();
        currFrame = (byte*)motionFrame.ImageData.ToPointer();

        // 1 - get difference between frames
        // 2 - threshold the difference
        for (var i = 0; i < frameSize; i++, backFrame++, currFrame++)
        {
            // difference
            diff = *currFrame - *backFrame;
            // threshold (the byte casts are required to assign through a byte pointer)
            *currFrame = ((diff >= differenceThreshold) || (diff <= differenceThresholdNeg)) ? (byte)255 : (byte)0;
        }

        if (suppressNoise)
        {
            // suppress noise and calculate motion amount
            AForge.SystemTools.CopyUnmanagedMemory(tempFrame.ImageData, motionFrame.ImageData, frameSize);
            erosionFilter.Apply(tempFrame, motionFrame);

            if (keepObjectEdges)
            {
                AForge.SystemTools.CopyUnmanagedMemory(tempFrame.ImageData, motionFrame.ImageData, frameSize);
                dilatationFilter.Apply(tempFrame, motionFrame);
            }
        }

        // calculate amount of motion pixels
        pixelsChanged = 0;
        var motion = (byte*)motionFrame.ImageData.ToPointer();
        for (var i = 0; i < frameSize; i++, motion++)
        {
            pixelsChanged += (*motion & 1);
        }
    }
}
/// <summary>
/// Process new video frame.
/// </summary>
///
/// <param name="videoFrame">Video frame to process (detect motion in).</param>
///
/// <remarks><para>Processes new frame from video source and detects motion in it.</para>
///
/// <para>Check <see cref="MotionLevel"/> property to get information about amount of motion
/// (changes) in the processed frame.</para>
/// </remarks>
///
public unsafe void ProcessFrame(UnmanagedImage videoFrame)
{
    lock (sync)
    {
        // check background frame
        if (backgroundFrame == null)
        {
            // save image dimensions
            width = videoFrame.Width;
            height = videoFrame.Height;

            // allocate memory for background frame
            backgroundFrame = UnmanagedImage.Create(width, height, videoFrame.PixelFormat);
            frameSize = backgroundFrame.Stride * height;

            // copy source frame as the background
            videoFrame.Copy(backgroundFrame);
            return;
        }

        // check image dimensions
        if ((videoFrame.Width != width) || (videoFrame.Height != height))
        {
            return;
        }

        // check motion frame
        if (motionFrame == null)
        {
            motionFrame = UnmanagedImage.Create(width, height, PixelFormat.Format8bppIndexed);
            motionSize = motionFrame.Stride * height;

            // temporary buffer
            if (suppressNoise)
            {
                tempFrame = UnmanagedImage.Create(width, height, PixelFormat.Format8bppIndexed);
            }
        }

        // pointers to background, current and motion frames
        byte* backFrame = (byte*)backgroundFrame.ImageData.ToPointer();
        byte* currFrame = (byte*)videoFrame.ImageData.ToPointer();
        byte* motion = (byte*)motionFrame.ImageData.ToPointer();
        byte* currFrameLocal;
        byte* backFrameLocal;
        byte* motionLocal;
        int diff;
        int bytesPerPixel = Tools.BytesPerPixel(videoFrame.PixelFormat);

        // 1 - get difference between frames (accumulated over all channels)
        // 2 - threshold the difference
        for (int i = 0; i < height; i++)
        {
            currFrameLocal = currFrame;
            backFrameLocal = backFrame;
            motionLocal = motion;

            for (int j = 0; j < width; j++)
            {
                diff = 0;
                for (int nbBytes = 0; nbBytes < bytesPerPixel; nbBytes++)
                {
                    // difference
                    diff += Math.Abs(*currFrameLocal - *backFrameLocal);
                    currFrameLocal++;
                    backFrameLocal++;
                }
                diff /= bytesPerPixel;

                // threshold
                *motionLocal = (diff >= differenceThreshold) ? (byte)255 : (byte)0;
                motionLocal++;
            }

            currFrame += videoFrame.Stride;
            backFrame += backgroundFrame.Stride;
            motion += motionFrame.Stride;
        }

        if (suppressNoise)
        {
            // suppress noise and calculate motion amount
            AForge.SystemTools.CopyUnmanagedMemory(tempFrame.ImageData, motionFrame.ImageData, motionSize);
            erosionFilter.Apply(tempFrame, motionFrame);

            if (keepObjectEdges)
            {
                AForge.SystemTools.CopyUnmanagedMemory(tempFrame.ImageData, motionFrame.ImageData, motionSize);
                dilatationFilter.Apply(tempFrame, motionFrame);
            }
        }

        // calculate amount of motion pixels
        pixelsChanged = 0;
        motion = (byte*)motionFrame.ImageData.ToPointer();
        for (int i = 0; i < motionSize; i++, motion++)
        {
            pixelsChanged += (*motion & 1);
        }
    }
}
private void Detection()
{
    var watch = System.Diagnostics.Stopwatch.StartNew();
    if (Video.Image != null)
    {
        if (ModeList.selectedIndex == 0)
        {
            training = 1;
            int prev = AlphabetList.selectedIndex;
            if (AlphabetList.selectedIndex == 26 || prev == 26) { label = 67; }
            else if (AlphabetList.selectedIndex == -1) { label = prev; }
            else { label = AlphabetList.selectedIndex; }
        }
        else
        {
            training = 0;
        }

        ProgressBar.Visible = true;
        ProgressBar.Value = 0;
        ProgressBar.Maximum_Value = 9;
        ProgressBar.Value += 1;

        CapturedBox.Image = (Bitmap)Video.Image.Clone();
        Bitmap src = new Bitmap(CapturedBox.Image);

        // skin detection: process the four image quadrants in parallel
        // (note: the buffer size assumes stride == width * bytes-per-pixel)
        var image = new Rectangle(0, 0, src.Width, src.Height);
        var value = src.LockBits(image, ImageLockMode.ReadWrite, src.PixelFormat);
        var size = Bitmap.GetPixelFormatSize(value.PixelFormat) / 8;
        var buffer = new byte[value.Width * value.Height * size];
        Marshal.Copy(value.Scan0, buffer, 0, buffer.Length);
        System.Threading.Tasks.Parallel.Invoke(
            () => { Skin_process(buffer, 0, 0, value.Width / 2, value.Height / 2, value.Width, size); },
            () => { Skin_process(buffer, 0, value.Height / 2, value.Width / 2, value.Height, value.Width, size); },
            () => { Skin_process(buffer, value.Width / 2, 0, value.Width, value.Height / 2, value.Width, size); },
            () => { Skin_process(buffer, value.Width / 2, value.Height / 2, value.Width, value.Height, value.Width, size); }
        );
        Marshal.Copy(buffer, 0, value.Scan0, buffer.Length);
        src.UnlockBits(value);
        SkinBox.Image = src;

        if (Skin == 1)
        {
            ProgressBar.Value += 1;

            // dilation & erosion (10 passes each) to close holes in the skin mask
            src = Grayscale.CommonAlgorithms.BT709.Apply(src);
            BinaryDilation3x3 dilatation = new BinaryDilation3x3();
            BinaryErosion3x3 erosion = new BinaryErosion3x3();
            for (int a = 1; a <= 10; a++) { src = dilatation.Apply(src); }
            for (int a = 1; a <= 10; a++) { src = erosion.Apply(src); }
            ProgressBar.Value += 1;
            NoiseBox.Image = src;

            // extract the biggest blob (assumed to be the hand)
            try
            {
                ExtractBiggestBlob blob = new ExtractBiggestBlob();
                src = blob.Apply(src);
                x = blob.BlobPosition.X;
                y = blob.BlobPosition.Y;
                ProgressBar.Value += 1;
            }
            catch
            {
                this.Show();
                //MessageBox.Show("Lightning conditions are not good for detecting the gestures", "Bad Lights", MessageBoxButtons.OK, MessageBoxIcon.Information);
            }

            // merge the extracted blob with the captured frame, quadrant by quadrant in parallel
            Bitmap srcImage = new Bitmap(CapturedBox.Image);
            Bitmap dstImage = new Bitmap(src);
            var srcrect = new Rectangle(0, 0, srcImage.Width, srcImage.Height);
            var dstrect = new Rectangle(0, 0, dstImage.Width, dstImage.Height);
            var srcdata = srcImage.LockBits(srcrect, ImageLockMode.ReadWrite, srcImage.PixelFormat);
            var dstdata = dstImage.LockBits(dstrect, ImageLockMode.ReadWrite, dstImage.PixelFormat);
            var srcdepth = Bitmap.GetPixelFormatSize(srcdata.PixelFormat) / 8; // bytes per pixel
            var dstdepth = Bitmap.GetPixelFormatSize(dstdata.PixelFormat) / 8;
            var srcbuffer = new byte[srcdata.Width * srcdata.Height * srcdepth];
            var dstbuffer = new byte[dstdata.Width * dstdata.Height * dstdepth];
            // copy pixels to the buffers
            Marshal.Copy(srcdata.Scan0, srcbuffer, 0, srcbuffer.Length);
            Marshal.Copy(dstdata.Scan0, dstbuffer, 0, dstbuffer.Length);
            System.Threading.Tasks.Parallel.Invoke(
                () => { // upper-left
                    Merge_process(srcbuffer, dstbuffer, x, 0, y, 0, x + (dstdata.Width / 2), dstdata.Width / 2, y + (dstdata.Height / 2), dstdata.Height / 2, srcdata.Width, dstdata.Width, srcdepth, dstdepth);
                },
                () => { // upper-right
                    Merge_process(srcbuffer, dstbuffer, x + (dstdata.Width / 2), dstdata.Width / 2, y, 0, x + (dstdata.Width), dstdata.Width, y + (dstdata.Height / 2), dstdata.Height / 2, srcdata.Width, dstdata.Width, srcdepth, dstdepth);
                },
                () => { // lower-left
                    Merge_process(srcbuffer, dstbuffer, x, 0, y + (dstdata.Height / 2), dstdata.Height / 2, x + (dstdata.Width / 2), dstdata.Width / 2, y + (dstdata.Height), dstdata.Height, srcdata.Width, dstdata.Width, srcdepth, dstdepth);
                },
                () => { // lower-right
                    Merge_process(srcbuffer, dstbuffer, x + (dstdata.Width / 2), dstdata.Width / 2, y + (dstdata.Height / 2), dstdata.Height / 2, x + (dstdata.Width), dstdata.Width, y + (dstdata.Height), dstdata.Height, srcdata.Width, dstdata.Width, srcdepth, dstdepth);
                }
            );
            // copy the buffers back to the images
            Marshal.Copy(srcbuffer, 0, srcdata.Scan0, srcbuffer.Length);
            Marshal.Copy(dstbuffer, 0, dstdata.Scan0, dstbuffer.Length);
            srcImage.UnlockBits(srcdata);
            dstImage.UnlockBits(dstdata);
            src = dstImage;
            ProgressBar.Value += 1;
            CropBox.Image = src;

            // resize to a fixed 200x200 input
            ResizeBilinear resize = new ResizeBilinear(200, 200);
            src = resize.Apply(src);
            ProgressBar.Value += 1;

            // edge detection
            src = Grayscale.CommonAlgorithms.BT709.Apply((Bitmap)src);
            SobelEdgeDetector edges = new SobelEdgeDetector();
            src = edges.Apply(src);
            ProgressBar.Value += 1;
            EdgeDetectorBox.Image = src;

            // HOEF: count edge pixels in each cell of a 6x6 grid, then normalize
            Bitmap block = new Bitmap(src);
            int[] edgescount = new int[50];
            double[] norm = new double[200];
            String text = null;
            int sum = 0;
            int z = 1;
            for (int p = 1; p <= 6; p++)
            {
                for (int q = 1; q <= 6; q++)
                {
                    for (int x = (p - 1) * block.Width / 6; x < (p * block.Width / 6); x++)
                    {
                        for (int y = (q - 1) * block.Height / 6; y < (q * block.Height / 6); y++)
                        {
                            Color colorPixel = block.GetPixel(x, y);
                            int r = colorPixel.R;
                            int g = colorPixel.G;
                            int b = colorPixel.B;
                            if (r != 0 && g != 0 && b != 0) { edgescount[z]++; }
                        }
                    }
                    z++;
                }
            }
            for (z = 1; z <= 36; z++) { sum = sum + edgescount[z]; }
            for (z = 1; z <= 36; z++)
            {
                norm[z] = (double)edgescount[z] / sum;
                text = text + " " + z.ToString() + ":" + norm[z].ToString();
            }

            if (training == 1)
            {
                File.AppendAllText(@"D:\train.txt", label.ToString() + text + Environment.NewLine);
                ProgressBar.Value += 1;
            }
            else
            {
                File.WriteAllText(@"D:\test.txt", label.ToString() + text + Environment.NewLine);
                ProgressBar.Value += 1;

                // SVM classification
                Problem train = Problem.Read(@"D:\train.txt");
                Problem test = Problem.Read(@"D:\test.txt");
                Parameter parameter = new Parameter() { C = 32, Gamma = 8 };
                Model model = Training.Train(train, parameter);
                Prediction.Predict(test, @"D:\result.txt", model, false);
                int value1 = Convert.ToInt32(File.ReadAllText(@"D:\result.txt"));
                String alphabet = null;
                if (value1 == 27) { alphabet += "Welcome "; }
                else if (value1 == 28) { alphabet += "Good Morning"; }
                else if (value1 == 29) { alphabet += "Thank You"; }
                else { alphabet += (char)(65 + value1); }
                OutputText.Text = alphabet;

                // speak the recognized text
                SpeechSynthesizer speechSynthesizer = new SpeechSynthesizer();
                speechSynthesizer.SetOutputToDefaultAudioDevice();
                speechSynthesizer.Volume = 100;
                speechSynthesizer.Rate = -2;
                speechSynthesizer.SelectVoiceByHints(VoiceGender.Female, VoiceAge.Child);
                speechSynthesizer.SpeakAsync(alphabet);
                if (alphabet == " ") { speechSynthesizer.SpeakAsync(OutputText.Text); }
                ProgressBar.Value += 1;
            }
        }
        else
        {
            this.Show();
        }

        watch.Stop();
        var time = watch.ElapsedMilliseconds;
        float secs = (float)time / 1000;
        ExecutionTimeBox.Text = Convert.ToString(secs) + " " + "Seconds";
    }
}
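// Each processed frame appends one libsvm-format line to the train/test file: the class
// label followed by 36 "index:value" pairs holding the normalized per-cell edge counts.
// A hypothetical line for label 4 (the values are invented and sum to 1 over the 36 cells):
//
//   4 1:0.0270 2:0.0135 3:0 ... 36:0.0412
//
// Problem.Read then parses these lines back into feature vectors for SVM training and
// prediction.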
/// <summary>
/// Process new video frame.
/// </summary>
///
/// <param name="videoFrame">Video frame to process (detect motion in).</param>
///
/// <remarks><para>Processes new frame from video source and detects motion in it.</para>
///
/// <para>Check <see cref="MotionLevel"/> property to get information about amount of motion
/// (changes) in the processed frame.</para>
/// </remarks>
///
public unsafe void ProcessFrame(UnmanagedImage videoFrame)
{
    lock (_sync)
    {
        // check previous frame
        if (_previousFrame == null)
        {
            // save image dimensions
            _width = videoFrame.Width;
            _height = videoFrame.Height;

            // allocate memory for previous and current frames
            _previousFrame = UnmanagedImage.Create(_width, _height, PixelFormat.Format8bppIndexed);
            _motionFrame = UnmanagedImage.Create(_width, _height, PixelFormat.Format8bppIndexed);
            _frameSize = _motionFrame.Stride * _height;

            // temporary buffer
            if (_suppressNoise)
            {
                _tempFrame = UnmanagedImage.Create(_width, _height, PixelFormat.Format8bppIndexed);
            }

            // convert source frame to grayscale
            Tools.ConvertToGrayscale(videoFrame, _previousFrame);
            return;
        }

        // check image dimensions
        if ((videoFrame.Width != _width) || (videoFrame.Height != _height))
        {
            return;
        }

        // convert current image to grayscale
        Tools.ConvertToGrayscale(videoFrame, _motionFrame);

        UInt64* prevFrame = (UInt64*)_previousFrame.ImageData.ToPointer();
        UInt64* currFrame = (UInt64*)_motionFrame.ImageData.ToPointer();

        // process eight pixels per iteration:
        // 1 - XOR exposes the bits that differ between the frames
        // 2 - the mask keeps only the bits relevant to the threshold
        // 3 - copy current frame to previous frame
        // 4 - set every byte lane with surviving bits to 255
        for (int i = 0; i < _frameSize / sizeof(UInt64); i++, prevFrame++, currFrame++)
        {
            var diff = (*currFrame ^ *prevFrame) & _differenceThresholMask;

            *prevFrame = *currFrame;

            *currFrame = 0;
            if ((diff & 0xFF00000000000000) != 0) { *currFrame |= 0xFF00000000000000; } // 1st byte
            if ((diff & 0x00FF000000000000) != 0) { *currFrame |= 0x00FF000000000000; } // 2nd byte
            if ((diff & 0x0000FF0000000000) != 0) { *currFrame |= 0x0000FF0000000000; } // 3rd byte
            if ((diff & 0x000000FF00000000) != 0) { *currFrame |= 0x000000FF00000000; } // 4th byte
            if ((diff & 0x00000000FF000000) != 0) { *currFrame |= 0x00000000FF000000; } // 5th byte
            if ((diff & 0x0000000000FF0000) != 0) { *currFrame |= 0x0000000000FF0000; } // 6th byte
            if ((diff & 0x000000000000FF00) != 0) { *currFrame |= 0x000000000000FF00; } // 7th byte
            if ((diff & 0x00000000000000FF) != 0) { *currFrame |= 0x00000000000000FF; } // 8th byte
        }

        if (_suppressNoise)
        {
            // suppress noise and calculate motion amount
            AForge.SystemTools.CopyUnmanagedMemory(_tempFrame.ImageData, _motionFrame.ImageData, _frameSize);
            _erosionFilter.Apply(_tempFrame, _motionFrame);
        }

        // calculate amount of motion pixels, again eight byte lanes at a time
        _pixelsChanged = 0;
        UInt64* motion = (UInt64*)_motionFrame.ImageData.ToPointer();
        for (int i = 0; i < _frameSize / sizeof(UInt64); i++, motion++)
        {
            if ((*motion & 0xFF00000000000000) != 0) { _pixelsChanged++; } // 1st byte
            if ((*motion & 0x00FF000000000000) != 0) { _pixelsChanged++; } // 2nd byte
            if ((*motion & 0x0000FF0000000000) != 0) { _pixelsChanged++; } // 3rd byte
            if ((*motion & 0x000000FF00000000) != 0) { _pixelsChanged++; } // 4th byte
            if ((*motion & 0x00000000FF000000) != 0) { _pixelsChanged++; } // 5th byte
            if ((*motion & 0x0000000000FF0000) != 0) { _pixelsChanged++; } // 6th byte
            if ((*motion & 0x000000000000FF00) != 0) { _pixelsChanged++; } // 7th byte
            if ((*motion & 0x00000000000000FF) != 0) { _pixelsChanged++; } // 8th byte
        }
    }
}
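// The UInt64 variant above thresholds eight pixels per iteration: XOR exposes the bits
// that differ, the precomputed mask keeps only selected bits of each byte lane, and any
// surviving bit marks that pixel as motion. Note this only approximates a magnitude
// threshold (e.g. 0x0F vs 0x10 differ by 1 yet set several high XOR bits). A scalar
// sketch of the same per-pixel test; the per-byte mask value is an assumption:
static byte ThresholdPixel(byte curr, byte prev, byte laneMask /* e.g. 0xF0 */)
{
    // any masked differing bit => pixel flagged as motion (255), else 0
    return (((curr ^ prev) & laneMask) != 0) ? (byte)255 : (byte)0;
}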