public void BinaryDilation3x3Test1()
{
    string basePath = NUnit.Framework.TestContext.CurrentContext.TestDirectory;

    #region doc_binary_dilation_3x3
    // Let's start with one of the default
    // color test images in the framework:
    var test = new TestImages(basePath);

    // Let's get Lena's picture
    Bitmap bmp = test["lena.bmp"];

    // And transform it to a binary mask
    // using Niblack's threshold class
    var niblack = new NiblackThreshold();
    Bitmap binary = niblack.Apply(bmp);

    // The result can be seen below:
    // ImageBox.Show(binary);

    // Now, let's finally apply the dilation
    // filter to the binarized image below:
    var dil3x3 = new BinaryDilation3x3();
    Bitmap result = dil3x3.Apply(binary);

    // The result can be seen below:
    // ImageBox.Show(result);
    #endregion

    //result.Save(@"C:\Projects\morpho-dilation3x3-result.png");
    //binary.Save(@"C:\Projects\morpho-dilation3x3-binary.png");
}
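// A minimal follow-up sketch, not part of the test above: the same two 3x3
// primitives compose into a morphological closing, which fills small holes
// left by the thresholding. Only the BinaryDilation3x3 and BinaryErosion3x3
// filters from Accord.Imaging.Filters are assumed; the helper name is mine.
using System.Drawing;
using Accord.Imaging.Filters;

static Bitmap Close3x3(Bitmap binary)
{
    // dilate first, then erode: a closing fills holes smaller than the kernel
    var dilation = new BinaryDilation3x3();
    var erosion = new BinaryErosion3x3();
    return erosion.Apply(dilation.Apply(binary));
}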
/// <summary>
/// Process new video frame.
/// </summary>
///
/// <param name="videoFrame">Video frame to process (detect motion in).</param>
///
/// <remarks><para>Processes new frame from video source and detects motion in it.</para>
///
/// <para>Check <see cref="MotionLevel"/> property to get information about amount of motion
/// (changes) in the processed frame.</para>
/// </remarks>
///
public unsafe void ProcessFrame(UnmanagedImage videoFrame)
{
    lock (_sync)
    {
        // check background frame
        if (_backgroundFrame == null)
        {
            // save image dimension
            _width = videoFrame.Width;
            _height = videoFrame.Height;

            // allocate memory for background frame
            _backgroundFrame = UnmanagedImage.Create(_width, _height, videoFrame.PixelFormat);

            // copy the source frame as the initial background
            videoFrame.Copy(_backgroundFrame);
            return;
        }

        // check image dimension
        if ((videoFrame.Width != _width) || (videoFrame.Height != _height))
            return;

        // check motion frame
        if (_motionFrame == null)
        {
            _motionFrame = UnmanagedImage.Create(_width, _height, PixelFormat.Format8bppIndexed);
            _motionSize = _motionFrame.Stride * _height;

            // temporary buffer
            if (_suppressNoise)
                _tempFrame = UnmanagedImage.Create(_width, _height, PixelFormat.Format8bppIndexed);
        }

        // pointers to background and current frames
        var backFrame = (byte*)_backgroundFrame.ImageData.ToPointer();
        var currFrame = (byte*)videoFrame.ImageData.ToPointer();
        byte* motion = (byte*)_motionFrame.ImageData.ToPointer();

        int bytesPerPixel = Tools.BytesPerPixel(videoFrame.PixelFormat);

        // 1 - get difference between frames
        // 2 - threshold the difference (averaged over all channels)
        for (int i = 0; i < _height; i++)
        {
            var currFrameLocal = currFrame;
            var backFrameLocal = backFrame;
            var motionLocal = motion;

            for (int j = 0; j < _width; j++)
            {
                var diff = 0;
                for (int nbBytes = 0; nbBytes < bytesPerPixel; nbBytes++)
                {
                    // difference
                    diff += Math.Abs(*currFrameLocal - *backFrameLocal);
                    currFrameLocal++;
                    backFrameLocal++;
                }

                diff /= bytesPerPixel;

                // threshold
                *motionLocal = (diff >= _differenceThreshold) ? (byte)255 : (byte)0;
                motionLocal++;
            }

            currFrame += videoFrame.Stride;
            backFrame += _backgroundFrame.Stride;
            motion += _motionFrame.Stride;
        }

        if (_suppressNoise)
        {
            // suppress noise and calculate motion amount
            Accord.SystemTools.CopyUnmanagedMemory(_tempFrame.ImageData, _motionFrame.ImageData, _motionSize);
            _erosionFilter.Apply(_tempFrame, _motionFrame);

            if (_keepObjectEdges)
            {
                Accord.SystemTools.CopyUnmanagedMemory(_tempFrame.ImageData, _motionFrame.ImageData, _motionSize);
                _dilatationFilter.Apply(_tempFrame, _motionFrame);
            }
        }

        // calculate amount of motion pixels
        _pixelsChanged = 0;
        motion = (byte*)_motionFrame.ImageData.ToPointer();

        for (int i = 0; i < _motionSize; i++, motion++)
        {
            _pixelsChanged += (*motion & 1);
        }
    }
}
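// Hedged usage sketch (not part of the class above): feeding Bitmap frames to a
// detector through Accord's IMotionDetector interface, which the surrounding
// class appears to implement, and reading the MotionLevel property mentioned in
// the XML remarks. The 0.02f threshold is an arbitrary illustration value.
using System;
using System.Drawing;
using Accord.Imaging;
using Accord.Vision.Motion;

static void CheckFrame(IMotionDetector detector, Bitmap frame)
{
    using (UnmanagedImage unmanaged = UnmanagedImage.FromManagedImage(frame))
    {
        detector.ProcessFrame(unmanaged);

        // MotionLevel reports the fraction of pixels flagged as changed
        if (detector.MotionLevel > 0.02f)
            Console.WriteLine("Motion detected");
    }
}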
/// <summary>
/// Process new video frame.
/// </summary>
///
/// <param name="videoFrame">Video frame to process (detect motion in).</param>
///
/// <remarks><para>Processes new frame from video source and detects motion in it.</para>
///
/// <para>Check <see cref="MotionLevel"/> property to get information about amount of motion
/// (changes) in the processed frame.</para>
/// </remarks>
///
public unsafe void ProcessFrame(UnmanagedImage videoFrame)
{
    lock (_sync)
    {
        // check background frame
        if (_backgroundFrame == null)
        {
            _lastTimeMeasurment = DateTime.Now;

            // save image dimension
            _width = videoFrame.Width;
            _height = videoFrame.Height;

            // allocate memory for background and motion frames
            _backgroundFrame = UnmanagedImage.Create(_width, _height, PixelFormat.Format8bppIndexed);
            _motionFrame = UnmanagedImage.Create(_width, _height, PixelFormat.Format8bppIndexed);

            _frameSize = _motionFrame.Stride * _height;

            // temporary buffer
            if (_suppressNoise)
                _tempFrame = UnmanagedImage.Create(_width, _height, PixelFormat.Format8bppIndexed);

            // convert source frame to grayscale
            Tools.ConvertToGrayscale(videoFrame, _backgroundFrame);
            return;
        }

        // check image dimension
        if ((videoFrame.Width != _width) || (videoFrame.Height != _height))
            return;

        // convert current image to grayscale
        Tools.ConvertToGrayscale(videoFrame, _motionFrame);

        // pointers to background and current frames
        byte* backFrame;
        byte* currFrame;
        int diff;

        // update background frame
        if (_millisecondsPerBackgroundUpdate == 0)
        {
            // update background frame using frame counter as a base
            if (++_framesCounter == _framesPerBackgroundUpdate)
            {
                _framesCounter = 0;

                backFrame = (byte*)_backgroundFrame.ImageData.ToPointer();
                currFrame = (byte*)_motionFrame.ImageData.ToPointer();

                for (int i = 0; i < _frameSize; i++, backFrame++, currFrame++)
                {
                    diff = *currFrame - *backFrame;
                    if (diff > 0)
                        (*backFrame)++;
                    else if (diff < 0)
                        (*backFrame)--;
                }
            }
        }
        else
        {
            // update background frame using timer as a base

            // get current time and calculate difference
            DateTime currentTime = DateTime.Now;
            TimeSpan timeDiff = currentTime - _lastTimeMeasurment;
            // save current time as the last measurement
            _lastTimeMeasurment = currentTime;

            int milliseconds = (int)timeDiff.TotalMilliseconds + _millisecondsLeftUnprocessed;

            // save remainder so it can be taken into account in the future
            _millisecondsLeftUnprocessed = milliseconds % _millisecondsPerBackgroundUpdate;

            // get amount for background update
            int updateAmount = milliseconds / _millisecondsPerBackgroundUpdate;

            backFrame = (byte*)_backgroundFrame.ImageData.ToPointer();
            currFrame = (byte*)_motionFrame.ImageData.ToPointer();

            for (int i = 0; i < _frameSize; i++, backFrame++, currFrame++)
            {
                diff = *currFrame - *backFrame;
                if (diff > 0)
                    (*backFrame) += (byte)((diff < updateAmount) ? diff : updateAmount);
                else if (diff < 0)
                    (*backFrame) += (byte)((-diff < updateAmount) ? diff : -updateAmount);
            }
        }

        backFrame = (byte*)_backgroundFrame.ImageData.ToPointer();
        currFrame = (byte*)_motionFrame.ImageData.ToPointer();

        // 1 - get difference between frames
        // 2 - threshold the difference
        for (int i = 0; i < _frameSize; i++, backFrame++, currFrame++)
        {
            // difference
            diff = *currFrame - *backFrame;
            // threshold
            *currFrame = ((diff >= _differenceThreshold) || (diff <= _differenceThresholdNeg)) ?
                (byte)255 : (byte)0;
        }

        if (_suppressNoise)
        {
            // suppress noise and calculate motion amount
            Accord.SystemTools.CopyUnmanagedMemory(_tempFrame.ImageData, _motionFrame.ImageData, _frameSize);
            _erosionFilter.Apply(_tempFrame, _motionFrame);

            if (_keepObjectEdges)
            {
                Accord.SystemTools.CopyUnmanagedMemory(_tempFrame.ImageData, _motionFrame.ImageData, _frameSize);
                _dilatationFilter.Apply(_tempFrame, _motionFrame);
            }
        }

        // calculate amount of motion pixels
        _pixelsChanged = 0;
        byte* motion = (byte*)_motionFrame.ImageData.ToPointer();

        for (int i = 0; i < _frameSize; i++, motion++)
        {
            _pixelsChanged += (*motion & 1);
        }
    }
}
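// A small self-contained sketch of the timer-based bookkeeping above: elapsed
// milliseconds (plus any remainder carried from the previous frame) are divided
// into whole background-update steps, and the new remainder is carried forward.
// The helper name and tuple shape are illustrative, not part of the detector.
static (int updateAmount, int leftoverMs) BackgroundUpdateStep(
    int elapsedMs, int carriedMs, int msPerUpdate)
{
    int total = elapsedMs + carriedMs;
    return (total / msPerUpdate, total % msPerUpdate);
}

// Example: 37 ms elapsed, nothing carried, one update per 10 ms:
//   BackgroundUpdateStep(37, 0, 10) -> (3, 7)
// so each background pixel moves up to 3 levels toward the current frame,
// and the remaining 7 ms are taken into account on the next call.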
/// <summary>
/// Process new video frame.
/// </summary>
///
/// <param name="videoFrame">Video frame to process (detect motion in).</param>
///
/// <remarks><para>Processes new frame from video source and detects motion in it.</para>
///
/// <para>Check <see cref="MotionLevel"/> property to get information about amount of motion
/// (changes) in the processed frame.</para>
/// </remarks>
///
public void ProcessFrame(UnmanagedImage videoFrame)
{
    lock (sync)
    {
        // check background frame
        if (backgroundFrame == null)
        {
            // save image dimension
            width = videoFrame.Width;
            height = videoFrame.Height;

            // allocate memory for background frame
            backgroundFrame = UnmanagedImage.Create(width, height, PixelFormat.Format8bppIndexed);
            frameSize = backgroundFrame.Stride * height;

            // convert source frame to grayscale
            Accord.Vision.Tools.ConvertToGrayscale(videoFrame, backgroundFrame);
            return;
        }

        // check image dimension
        if ((videoFrame.Width != width) || (videoFrame.Height != height))
            return;

        // check motion frame
        if (motionFrame == null)
        {
            motionFrame = UnmanagedImage.Create(width, height, PixelFormat.Format8bppIndexed);

            // temporary buffer
            if (suppressNoise)
                tempFrame = UnmanagedImage.Create(width, height, PixelFormat.Format8bppIndexed);
        }

        // convert current image to grayscale
        Accord.Vision.Tools.ConvertToGrayscale(videoFrame, motionFrame);

        unsafe
        {
            // pointers to background and current frames
            byte* backFrame;
            byte* currFrame;
            int diff;

            backFrame = (byte*)backgroundFrame.ImageData.ToPointer();
            currFrame = (byte*)motionFrame.ImageData.ToPointer();

            // 1 - get difference between frames
            // 2 - threshold the difference
            for (int i = 0; i < frameSize; i++, backFrame++, currFrame++)
            {
                // difference
                diff = (int)*currFrame - (int)*backFrame;
                // threshold
                *currFrame = ((diff >= differenceThreshold) || (diff <= differenceThresholdNeg)) ?
                    (byte)255 : (byte)0;
            }

            if (suppressNoise)
            {
                // suppress noise and calculate motion amount
                Accord.SystemTools.CopyUnmanagedMemory(tempFrame.ImageData, motionFrame.ImageData, frameSize);
                erosionFilter.Apply(tempFrame, motionFrame);

                if (keepObjectEdges)
                {
                    Accord.SystemTools.CopyUnmanagedMemory(tempFrame.ImageData, motionFrame.ImageData, frameSize);
                    dilationFilter.Apply(tempFrame, motionFrame);
                }
            }

            // calculate amount of motion pixels
            pixelsChanged = 0;
            byte* motion = (byte*)motionFrame.ImageData.ToPointer();

            for (int i = 0; i < frameSize; i++, motion++)
            {
                pixelsChanged += (*motion & 1);
            }
        }
    }
}
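// Hedged sketch: detectors like the one above are usually not called directly
// but wrapped in Accord's MotionDetector class, which converts Bitmap frames and
// returns the motion level in one call. This assumes the MotionDetector,
// CustomFrameDifferenceDetector, and MotionAreaHighlighting types carried over
// from the AForge.NET API; verify the names against your Accord version.
using System.Drawing;
using Accord.Vision.Motion;

static readonly MotionDetector motionDetector = new MotionDetector(
    new CustomFrameDifferenceDetector(),   // algorithm resembling the snippet above
    new MotionAreaHighlighting());         // optional: highlights motion regions

static float MeasureMotion(Bitmap frame)
{
    // returns the fraction of the frame covered by motion, in [0, 1];
    // the wrapper keeps the first frame it sees as the background
    return motionDetector.ProcessFrame(frame);
}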
private void Detection()
{
    var watch = System.Diagnostics.Stopwatch.StartNew();

    if (Video.Image != null)
    {
        if (ModeList.selectedIndex == 0)
        {
            training = 1;
            int prev = AlphabetList.selectedIndex;

            if (AlphabetList.selectedIndex == 26 || prev == 26)
                label = 67;
            else if (AlphabetList.selectedIndex == -1)
                label = prev;
            else
                label = AlphabetList.selectedIndex;
        }
        else
        {
            training = 0;
        }

        ProgressBar.Visible = true;
        ProgressBar.Value = 0;
        ProgressBar.Maximum_Value = 9;
        ProgressBar.Value += 1;

        CapturedBox.Image = (Bitmap)Video.Image.Clone();
        Bitmap src = new Bitmap(CapturedBox.Image);

        // skin detection: process the four image quadrants in parallel
        var image = new Rectangle(0, 0, src.Width, src.Height);
        var value = src.LockBits(image, ImageLockMode.ReadWrite, src.PixelFormat);
        var size = Bitmap.GetPixelFormatSize(value.PixelFormat) / 8;
        var buffer = new byte[value.Width * value.Height * size];
        Marshal.Copy(value.Scan0, buffer, 0, buffer.Length);

        System.Threading.Tasks.Parallel.Invoke(
            () => Skin_process(buffer, 0, 0, value.Width / 2, value.Height / 2, value.Width, size),
            () => Skin_process(buffer, 0, value.Height / 2, value.Width / 2, value.Height, value.Width, size),
            () => Skin_process(buffer, value.Width / 2, 0, value.Width, value.Height / 2, value.Width, size),
            () => Skin_process(buffer, value.Width / 2, value.Height / 2, value.Width, value.Height, value.Width, size)
        );

        Marshal.Copy(buffer, 0, value.Scan0, buffer.Length);
        src.UnlockBits(value);
        SkinBox.Image = src;

        if (Skin == 1)
        {
            ProgressBar.Value += 1;

            // dilation & erosion: clean up the binarized skin mask
            src = Grayscale.CommonAlgorithms.BT709.Apply(src);
            BinaryDilation3x3 dilation = new BinaryDilation3x3();
            BinaryErosion3x3 erosion = new BinaryErosion3x3();

            for (int a = 1; a <= 10; a++)
                src = dilation.Apply(src);
            for (int a = 1; a <= 10; a++)
                src = erosion.Apply(src);

            ProgressBar.Value += 1;
            NoiseBox.Image = src;

            // blob extraction: keep the largest connected component (the hand)
            try
            {
                ExtractBiggestBlob blob = new ExtractBiggestBlob();
                src = blob.Apply(src);
                x = blob.BlobPosition.X;
                y = blob.BlobPosition.Y;
                ProgressBar.Value += 1;
            }
            catch
            {
                this.Show();
                //MessageBox.Show("Lighting conditions are not good for detecting the gestures",
                //    "Bad Lights", MessageBoxButtons.OK, MessageBoxIcon.Information);
            }

            // merge: copy the blob region back over the captured frame
            Bitmap srcImage = new Bitmap(CapturedBox.Image);
            Bitmap dstImage = new Bitmap(src);
            var srcrect = new Rectangle(0, 0, srcImage.Width, srcImage.Height);
            var dstrect = new Rectangle(0, 0, dstImage.Width, dstImage.Height);
            var srcdata = srcImage.LockBits(srcrect, ImageLockMode.ReadWrite, srcImage.PixelFormat);
            var dstdata = dstImage.LockBits(dstrect, ImageLockMode.ReadWrite, dstImage.PixelFormat);
            var srcdepth = Bitmap.GetPixelFormatSize(srcdata.PixelFormat) / 8; // bytes per pixel
            var dstdepth = Bitmap.GetPixelFormatSize(dstdata.PixelFormat) / 8; // bytes per pixel
            var srcbuffer = new byte[srcdata.Width * srcdata.Height * srcdepth];
            var dstbuffer = new byte[dstdata.Width * dstdata.Height * dstdepth];

            // copy pixels to buffers
            Marshal.Copy(srcdata.Scan0, srcbuffer, 0, srcbuffer.Length);
            Marshal.Copy(dstdata.Scan0, dstbuffer, 0, dstbuffer.Length);

            System.Threading.Tasks.Parallel.Invoke(
                () =>
                {
                    // upper-left
                    Merge_process(srcbuffer, dstbuffer, x, 0, y, 0,
                        x + (dstdata.Width / 2), dstdata.Width / 2,
                        y + (dstdata.Height / 2), dstdata.Height / 2,
                        srcdata.Width, dstdata.Width, srcdepth, dstdepth);
                },
                () =>
                {
                    // upper-right
                    Merge_process(srcbuffer, dstbuffer,
                        x + (dstdata.Width / 2), dstdata.Width / 2, y, 0,
                        x + dstdata.Width, dstdata.Width,
                        y + (dstdata.Height / 2), dstdata.Height / 2,
                        srcdata.Width, dstdata.Width, srcdepth, dstdepth);
                },
                () =>
                {
                    // lower-left
                    Merge_process(srcbuffer, dstbuffer, x, 0,
                        y + (dstdata.Height / 2), dstdata.Height / 2,
                        x + (dstdata.Width / 2), dstdata.Width / 2,
                        y + dstdata.Height, dstdata.Height,
                        srcdata.Width, dstdata.Width, srcdepth, dstdepth);
                },
                () =>
                {
                    // lower-right
                    Merge_process(srcbuffer, dstbuffer,
                        x + (dstdata.Width / 2), dstdata.Width / 2,
                        y + (dstdata.Height / 2), dstdata.Height / 2,
                        x + dstdata.Width, dstdata.Width,
                        y + dstdata.Height, dstdata.Height,
                        srcdata.Width, dstdata.Width, srcdepth, dstdepth);
                }
            );

            // copy the buffers back to the images
            Marshal.Copy(srcbuffer, 0, srcdata.Scan0, srcbuffer.Length);
            Marshal.Copy(dstbuffer, 0, dstdata.Scan0, dstbuffer.Length);
            srcImage.UnlockBits(srcdata);
            dstImage.UnlockBits(dstdata);
            src = dstImage;
            ProgressBar.Value += 1;
            CropBox.Image = src;

            // resize to a fixed 200x200 window
            ResizeBilinear resize = new ResizeBilinear(200, 200);
            src = resize.Apply(src);
            ProgressBar.Value += 1;

            // edges
            src = Grayscale.CommonAlgorithms.BT709.Apply((Bitmap)src);
            SobelEdgeDetector edges = new SobelEdgeDetector();
            src = edges.Apply(src);
            ProgressBar.Value += 1;
            EdgeDetectorBox.Image = src;

            // HOEF: histogram of edge frequencies over a 6x6 block grid
            Bitmap block = new Bitmap(src);
            int[] edgescount = new int[50];
            double[] norm = new double[200];
            String text = null;
            int sum = 0;
            int z = 1;

            for (int p = 1; p <= 6; p++)
            {
                for (int q = 1; q <= 6; q++)
                {
                    for (int x = (p - 1) * block.Width / 6; x < (p * block.Width / 6); x++)
                    {
                        for (int y = (q - 1) * block.Height / 6; y < (q * block.Height / 6); y++)
                        {
                            Color colorPixel = block.GetPixel(x, y);
                            int r = colorPixel.R;
                            int g = colorPixel.G;
                            int b = colorPixel.B;

                            if (r != 0 && g != 0 && b != 0)
                                edgescount[z]++;
                        }
                    }
                    z++;
                }
            }

            for (z = 1; z <= 36; z++)
                sum = sum + edgescount[z];

            for (z = 1; z <= 36; z++)
            {
                norm[z] = (double)edgescount[z] / sum;
                text = text + " " + z.ToString() + ":" + norm[z].ToString();
            }

            if (training == 1)
            {
                File.AppendAllText(@"D:\train.txt", label.ToString() + text + Environment.NewLine);
                ProgressBar.Value += 1;
            }
            else
            {
                File.WriteAllText(@"D:\test.txt", label.ToString() + text + Environment.NewLine);
                ProgressBar.Value += 1;

                // SVM classification
                Problem train = Problem.Read(@"D:\train.txt");
                Problem test = Problem.Read(@"D:\test.txt");
                Parameter parameter = new Parameter() { C = 32, Gamma = 8 };
                Model model = Training.Train(train, parameter);
                Prediction.Predict(test, @"D:\result.txt", model, false);

                int value1 = Convert.ToInt32(File.ReadAllText(@"D:\result.txt"));
                String alphabet = null;

                if (value1 == 27)
                    alphabet += "Welcome ";
                else if (value1 == 28)
                    alphabet += "Good Morning";
                else if (value1 == 29)
                    alphabet += "Thank You";
                else
                    alphabet += (char)(65 + value1);

                OutputText.Text = alphabet;

                // speak the recognized gesture
                SpeechSynthesizer speechSynthesizer = new SpeechSynthesizer();
                speechSynthesizer.SetOutputToDefaultAudioDevice();
                speechSynthesizer.Volume = 100;
                speechSynthesizer.Rate = -2;
                speechSynthesizer.SelectVoiceByHints(VoiceGender.Female, VoiceAge.Child);
                speechSynthesizer.SpeakAsync(alphabet);

                if (alphabet == " ")
                    speechSynthesizer.SpeakAsync(OutputText.Text);

                ProgressBar.Value += 1;
            }
        }
        else
        {
            this.Show();
        }

        watch.Stop();
        var time = watch.ElapsedMilliseconds;
        float secs = (float)time / 1000;
        ExecutionTimeBox.Text = Convert.ToString(secs) + " Seconds";
    }
}
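// Hypothetical refactoring of the HOEF step above (the helper name is mine):
// count edge pixels per cell of a 6x6 grid over the Sobel image, then
// L1-normalize the 36 counts so the feature is independent of hand size.
// On a grayscale edge image R == G == B, so testing one channel is equivalent
// to the original three-channel test. GetPixel is slow; it is kept here only
// to mirror the original loop.
using System.Drawing;
using System.Linq;

static double[] EdgeHistogram(Bitmap edges)
{
    var counts = new int[36];

    for (int p = 0; p < 6; p++)
        for (int q = 0; q < 6; q++)
            for (int px = p * edges.Width / 6; px < (p + 1) * edges.Width / 6; px++)
                for (int py = q * edges.Height / 6; py < (q + 1) * edges.Height / 6; py++)
                    if (edges.GetPixel(px, py).R != 0)   // non-black pixel = edge
                        counts[p * 6 + q]++;

    double sum = counts.Sum();
    return counts.Select(c => sum > 0 ? c / sum : 0.0).ToArray();
}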
/// <summary>
/// Process new video frame.
/// </summary>
///
/// <param name="videoFrame">Video frame to process (detect motion in).</param>
///
/// <remarks><para>Processes new frame from video source and detects motion in it.</para>
///
/// <para>Check <see cref="MotionLevel"/> property to get information about amount of motion
/// (changes) in the processed frame.</para>
/// </remarks>
///
public unsafe void ProcessFrame(UnmanagedImage videoFrame)
{
    lock (_sync)
    {
        // check background frame
        if (_backgroundFrame == null)
        {
            // save image dimension
            _width = videoFrame.Width;
            _height = videoFrame.Height;

            // allocate memory for background frame
            _backgroundFrame = UnmanagedImage.Create(_width, _height, PixelFormat.Format8bppIndexed);
            _frameSize = _backgroundFrame.Stride * _height;

            // convert source frame to grayscale
            Tools.ConvertToGrayscale(videoFrame, _backgroundFrame);
            return;
        }

        // check image dimension
        if ((videoFrame.Width != _width) || (videoFrame.Height != _height))
            return;

        // check motion frame
        if (_motionFrame == null)
        {
            _motionFrame = UnmanagedImage.Create(_width, _height, PixelFormat.Format8bppIndexed);

            // temporary buffer
            if (_suppressNoise)
                _tempFrame = UnmanagedImage.Create(_width, _height, PixelFormat.Format8bppIndexed);
        }

        // convert current image to grayscale
        Tools.ConvertToGrayscale(videoFrame, _motionFrame);

        // pointers to background and current frames
        var backFrame = (byte*)_backgroundFrame.ImageData.ToPointer();
        var currFrame = (byte*)_motionFrame.ImageData.ToPointer();

        // 1 - get difference between frames
        // 2 - threshold the difference
        for (int i = 0; i < _frameSize; i++, backFrame++, currFrame++)
        {
            // difference
            var diff = *currFrame - *backFrame;
            // threshold
            *currFrame = ((diff >= _differenceThreshold) || (diff <= _differenceThresholdNeg)) ?
                (byte)255 : (byte)0;
        }

        if (_suppressNoise)
        {
            // suppress noise and calculate motion amount
            Accord.SystemTools.CopyUnmanagedMemory(_tempFrame.ImageData, _motionFrame.ImageData, _frameSize);
            _erosionFilter.Apply(_tempFrame, _motionFrame);

            if (_keepObjectEdges)
            {
                Accord.SystemTools.CopyUnmanagedMemory(_tempFrame.ImageData, _motionFrame.ImageData, _frameSize);
                _dilatationFilter.Apply(_tempFrame, _motionFrame);
            }
        }

        // calculate amount of motion pixels
        _pixelsChanged = 0;
        byte* motion = (byte*)_motionFrame.ImageData.ToPointer();

        for (int i = 0; i < _frameSize; i++, motion++)
        {
            _pixelsChanged += (*motion & 1);
        }
    }
}
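// Why the '*motion & 1' trick above counts pixels: after thresholding, the
// motion mask holds only 0 or 255, and 255 has its least significant bit set,
// so the AND maps 255 -> 1 and 0 -> 0. A tiny self-contained check (the method
// name is illustrative only):
using System;

static void MaskCountExample()
{
    byte[] mask = { 0, 255, 255, 0, 255 };

    int changed = 0;
    foreach (byte b in mask)
        changed += b & 1;   // 255 & 1 == 1, 0 & 1 == 0

    Console.WriteLine(changed);   // prints 3
}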