/// <summary>
        /// Apply filter to an image in unmanaged memory.
        /// </summary>
        ///
        /// <param name="image">Source image in unmanaged memory to apply filter to.</param>
        ///
        /// <returns>Returns the result of applying the filter to
        /// the source image.</returns>
        ///
        /// <remarks>The method keeps the source image unchanged and returns
        /// the result of the image processing filter as a new image.</remarks>
        ///
        /// <exception cref="UnsupportedImageFormatException">Unsupported pixel format of the source image.</exception>
        ///
        public UnmanagedImage Apply(UnmanagedImage image)
        {
            // check pixel format of the source image
            CheckSourceFormat(image.PixelFormat);

            // get new image size
            Size newSize = CalculateNewImageSize(image);

            // create new destination image
            UnmanagedImage dstImage = UnmanagedImage.Create(newSize.Width, newSize.Height, FormatTranslations[image.PixelFormat]);

            // process the filter
            ProcessFilter(image, dstImage);

            return dstImage;
        }
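A minimal usage sketch for the Apply overload above; ResizeNearestNeighbor is one AForge.NET transformation filter exposing it, and sourceBitmap is an assumed input Bitmap:

        // apply a size-changing filter to an unmanaged image
        UnmanagedImage source = UnmanagedImage.FromManagedImage(sourceBitmap);
        ResizeNearestNeighbor resize = new ResizeNearestNeighbor(160, 120);
        UnmanagedImage resized = resize.Apply(source); // source is left unchanged
        // ... use resized ...
        resized.Dispose();
        source.Dispose();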
Example 2
        static void Main(string[] args)
        {
            var img = new Bgr <byte> [480, 640];

            //***********************************************************************************************************************************************************************
            Console.ForegroundColor = ConsoleColor.Red; Console.WriteLine("********* TColor[,] <=> Image<> conversions (built-in) ****************"); Console.ResetColor();
            //to Image<>
            Image <Bgr <byte> > lockedImg = img.Lock();
            //from Image<>
            var arr = lockedImg.Clone();

            //***********************************************************************************************************************************************************************
            Console.WriteLine();
            Console.ForegroundColor = ConsoleColor.Red; Console.WriteLine("********* Image<,> <=> OpenCV conversions (built-in) ****************"); Console.ResetColor();
            //to IplImage
            IplImage iplImage;

            using (var uImg = img.Lock())
            {
                iplImage = uImg.AsOpenCvImage(); //data is shared
            }
            //from IplImage
            var imgFromIpl = iplImage.AsImage();

            //***********************************************************************************************************************************************************************
            Console.WriteLine();
            Console.ForegroundColor = ConsoleColor.Red; Console.WriteLine("*********** Image<,> <=> Bitmap conversions (BitmapInterop) ****************"); Console.ResetColor();
            //to Bitmap
            var bmp = img.ToBitmap();
            //from Bitmap
            var imgFromBmp = bmp.ToArray();

            //***********************************************************************************************************************************************************************
            Console.WriteLine();
            Console.ForegroundColor = ConsoleColor.Red; Console.WriteLine("****** TColor[,] <=> AForge UnmanagedImage conversions  (AForgeInterop) *******"); Console.ResetColor();
            //to AForge image
            using (var uImg = img.Lock())
            {
                UnmanagedImage uIm = uImg.AsAForgeImage(); //data is shared
            }

            //from AForge image
            UnmanagedImage aforgeIm      = UnmanagedImage.Create(640, 480, System.Drawing.Imaging.PixelFormat.Format32bppArgb);
            var            imgFromAForge = aforgeIm.AsImage(); //TODO: extensions directly to array
        }
 public PreProcess(UISettings ui, FileData file)
 {
     try {
         ExtendFileData(file);
         BitmapOriginal  = (ui.WorkDirectory + "\\" + FileName).FileTo24bbpRgb(ResizeRatio: ui.ResizeValue, FrameCrop: ui.CropValue, ImageZoom: 1, RotateDegree: ui.RotateDegree);
         UnmanagedMarkup = UnmanagedImage.FromManagedImage(BitmapOriginal);
         ImageStatistics stats    = null;
         Threshold       AFbinary = new Threshold(1);
         Grayscale       AFgray   = new Grayscale(0.1, 0.7, 0.2);
         if (ui.ExcludeColorSwitch && ui.ExcludeColorRadius > 0)
         {
             System.Windows.Media.Color excolor   = (System.Windows.Media.Color)System.Windows.Media.ColorConverter.ConvertFromString(ui.ExcludeColorHex);
             EuclideanColorFiltering    AFexcolor = new EuclideanColorFiltering(new RGB(excolor.R, excolor.G, excolor.B), (short)ui.ExcludeColorRadius);
             UnmanagedExclude = AFbinary.Apply(AFgray.Apply(AFexcolor.Apply(UnmanagedMarkup)));
         }
         else
         {
             UnmanagedExclude = UnmanagedImage.Create(UnmanagedMarkup.Width, UnmanagedMarkup.Height, PixelFormat.Format8bppIndexed);
         }
         if (ui.WhiteBalanceSwitch || ui.BlackBalanceSwitch)                 // need to apply auto white/black balance
         {
             Invert AFinvert = new Invert();
             stats = new ImageStatistics(UnmanagedMarkup, AFinvert.Apply(UnmanagedExclude));
             int lowend = (ui.BlackBalanceSwitch)
                 ? (int)Math.Round(0.333d * (stats.RedWithoutBlack.Center2QuantileValue(ui.BlackBalance)
                                             + stats.GreenWithoutBlack.Center2QuantileValue(ui.BlackBalance)
                                             + stats.BlueWithoutBlack.Center2QuantileValue(ui.BlackBalance)))
                 : 0;
             LevelsLinear levelsLinear = new LevelsLinear {
                 InRed   = new IntRange(lowend, (ui.WhiteBalanceSwitch) ? stats.RedWithoutBlack.Center2QuantileValue(ui.WhiteBalance) : 255),
                 InGreen = new IntRange(lowend, (ui.WhiteBalanceSwitch) ? stats.GreenWithoutBlack.Center2QuantileValue(ui.WhiteBalance) : 255),
                 InBlue  = new IntRange(lowend, (ui.WhiteBalanceSwitch) ? stats.BlueWithoutBlack.Center2QuantileValue(ui.WhiteBalance) : 255),
             };
             //LevelsLinear levelsLinear = new LevelsLinear {
             //	InRed=new IntRange((ui.BlackBalanceSwitch)?stats.RedWithoutBlack.Center2QuantileValue(ui.BlackBalance):0, (ui.WhiteBalanceSwitch)?stats.RedWithoutBlack.Center2QuantileValue(ui.WhiteBalance):255),
             //	InGreen=new IntRange((ui.BlackBalanceSwitch)?stats.GreenWithoutBlack.Center2QuantileValue(ui.BlackBalance):0, (ui.WhiteBalanceSwitch)?stats.GreenWithoutBlack.Center2QuantileValue(ui.WhiteBalance):255),
             //	InBlue=new IntRange((ui.BlackBalanceSwitch)?stats.BlueWithoutBlack.Center2QuantileValue(ui.BlackBalance):0, (ui.WhiteBalanceSwitch)?stats.BlueWithoutBlack.Center2QuantileValue(ui.WhiteBalance):255),
             //};
             levelsLinear.ApplyInPlace(UnmanagedMarkup);
         }
         if (ui.GaussianBlurSwitch && ui.GaussianBlur != 0)                                           // Gaussian Blur and Darken
         {
             GaussianBlur AFgblur     = new GaussianBlur(11.0, Math.Max(ui.GaussianBlur, 0) * 2 + 1); // sigma = 11.0, odd kernel size derived from the blur setting
             Intersect    AFintersect = new Intersect(AFgblur.Apply(UnmanagedMarkup));
             UnmanagedMarkup = AFintersect.Apply(UnmanagedMarkup);
         }
         UnmanagedGray = AFgray.Apply(UnmanagedMarkup);               // directly turn into gray
     } catch (Exception ex) { throw new Exception("Error Occurred During PreProcessing", ex); }
 }
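A self-contained sketch of the exclude-color chain used above (the red center and radius are illustrative placeholders; markup is an assumed 24bpp UnmanagedImage):

        // build a binary mask of pixels close to a target color
        EuclideanColorFiltering keepNearColor = new EuclideanColorFiltering(new RGB(255, 0, 0), 40); // keeps matching pixels, blacks out the rest
        Grayscale toGray = new Grayscale(0.1, 0.7, 0.2);
        Threshold toBinary = new Threshold(1); // any surviving pixel becomes white
        UnmanagedImage mask = toBinary.Apply(toGray.Apply(keepNearColor.Apply(markup)));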
        public Contour Extract(UnmanagedImage blob)
        {
            switch (Method)
            {
            case PreprocessMethod.ExtendAndDilatation:
                // extend the blob with a 1px border
                UnmanagedImage preprocessed = UnmanagedImage.Create(blob.Width + 2, blob.Height + 2, blob.PixelFormat);
                blob.CopyWithOffset(preprocessed, new IntPoint(1, 1));

                // dilatation
                m_dilatation.ApplyInPlace(preprocessed);

                // delete internal pixels
                m_deleteInternal.ApplyInPlace(preprocessed);

                // trace edge
                return m_edgeTracer.Extract(preprocessed);

            case PreprocessMethod.Scale:
                int scaleMultiplier = 3;
                m_resizer.NewHeight = blob.Height * scaleMultiplier;
                m_resizer.NewWidth  = blob.Width * scaleMultiplier;

                // scale blob
                UnmanagedImage resized = m_resizer.Apply(blob);

                // delete internal pixels
                m_deleteInternal.ApplyInPlace(resized);

                // trace edge
                return m_edgeTracer.Extract(resized);

            case PreprocessMethod.None:
                // delete internal pixels
                m_deleteInternal.ApplyInPlace(blob);

                // trace edge
                return m_edgeTracer.Extract(blob);

            default:
                throw new NotSupportedException("Unknown preprocessing method: " + Method);
            }
        }
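A usage sketch; the class owning Extract is not shown, so extractor and blobImage are assumed instances:

        // trace the contour of an 8bpp blob already in unmanaged memory
        extractor.Method = PreprocessMethod.ExtendAndDilatation;
        Contour contour = extractor.Extract(blobImage);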
Example 5
        public void SetPixelTestUnsupported(PixelFormat pixelFormat)
        {
            UnmanagedImage image = UnmanagedImage.Create(320, 240, pixelFormat);
            Color          color = Color.White;
            byte           value = 255;

            image.SetPixel(0, 0, color);
            image.SetPixel(319, 0, color);
            image.SetPixel(0, 239, color);
            image.SetPixel(319, 239, value);
            image.SetPixel(160, 120, value);

            image.SetPixel(-1, -1, color);
            image.SetPixel(320, 0, color);
            image.SetPixel(0, 240, value);
            image.SetPixel(320, 240, value);

            Assert.Throws <UnsupportedImageFormatException>(() => image.CollectActivePixels(), "");
        }
Example 6
        // Create motion zones' image
        private unsafe void CreateMotionZonesFrame( )
        {
            lock ( _sync )
            {
                _area = 0;
                // free previous motion zones frame
                if (_zonesFrame != null)
                {
                    _zonesFrame.Dispose( );
                    _zonesFrame = null;
                }

                // create motion zones frame only if the algorithm has processed at least one frame
                if ((_motionZones != null) && (_motionZones.Length != 0) && (_videoWidth != 0))
                {
                    _zonesFrame = UnmanagedImage.Create(_videoWidth, _videoHeight, PixelFormat.Format8bppIndexed);

                    var imageRect = new Rectangle(0, 0, _videoWidth, _videoHeight);

                    // draw all motion zones on motion frame
                    foreach (Rectangle rect in _motionZones)
                    {
                        rect.Intersect(imageRect);

                        // rectangle's dimension
                        int rectWidth  = rect.Width;
                        int rectHeight = rect.Height;

                        // start pointer
                        int   stride = _zonesFrame.Stride;
                        byte *ptr    = (byte *)_zonesFrame.ImageData.ToPointer( ) + rect.Y * stride + rect.X;

                        for (int y = 0; y < rectHeight; y++)
                        {
                            SystemTools.SetUnmanagedMemory(ptr, 255, rectWidth);
                            ptr += stride;
                        }
                        _area += rect.Width * rect.Height;
                    }
                }
            }
        }
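For context, a hedged sketch of how motion zones are typically configured before this frame is built (AForge-style motion processors expose a MotionZones array; detector is an assumed instance):

        detector.MotionZones = new Rectangle[]
        {
            new Rectangle(0, 0, 320, 240),    // watch the top-left quadrant
            new Rectangle(400, 100, 100, 150) // and a small region on the right
        };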
Example 7
        public void SetPixelsTestUnsupported(PixelFormat pixelFormat)
        {
            UnmanagedImage  image  = UnmanagedImage.Create(320, 240, pixelFormat);
            Color           color  = Color.White;
            List <IntPoint> points = new List <IntPoint>();

            points.Add(new IntPoint(0, 0));
            points.Add(new IntPoint(319, 0));
            points.Add(new IntPoint(0, 239));
            points.Add(new IntPoint(319, 239));
            points.Add(new IntPoint(160, 120));

            points.Add(new IntPoint(-1, -1));
            points.Add(new IntPoint(320, 0));
            points.Add(new IntPoint(0, 240));
            points.Add(new IntPoint(320, 240));

            Assert.Throws <UnsupportedImageFormatException>(() => image.SetPixels(points, color),
                                                            "The pixel format is not supported: Format32bppPArgb");
        }
        /// <summary>
        /// Set background frame.
        /// </summary>
        ///
        /// <param name="backgroundFrame">Background frame to set.</param>
        ///
        /// <remarks><para>The method sets background frame, which will be used to calculate
        /// difference with.</para></remarks>
        ///
        public void SetBackgroundFrame(UnmanagedImage backgroundFrame)
        {
            // reset motion detection algorithm
            Reset(true);

            lock ( _sync )
            {
                // save image dimension
                _width  = backgroundFrame.Width;
                _height = backgroundFrame.Height;

                // allocate memory for previous and current frames
                _backgroundFrame = UnmanagedImage.Create(_width, _height, backgroundFrame.PixelFormat);

                // convert source frame to grayscale
                backgroundFrame.Copy(_backgroundFrame);

                _manuallySetBackgroundFrame = true;
            }
        }
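A usage sketch, assuming detector is an instance of the class owning SetBackgroundFrame and that it also exposes a ProcessFrame method, as the related snippets here do:

        UnmanagedImage background = UnmanagedImage.FromManagedImage(backgroundBitmap);
        detector.SetBackgroundFrame(background); // compared against from now on
        detector.ProcessFrame(liveFrame);        // motion = difference from the background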
Example 9
        private void BtnDetectEdges_Click(object sender, EventArgs e)
        {
            try
            {
                ProgressBar.Maximum = 3;
                ProgressBar.Value   = 0;

                UnmanagedImage image = UnmanagedImage.FromManagedImage((Bitmap)ImgInput.Image);

                // 1 - grayscaling
                UnmanagedImage grayImage = null;

                if (image.PixelFormat == PixelFormat.Format8bppIndexed)
                {
                    grayImage = image;
                }
                else
                {
                    grayImage = UnmanagedImage.Create(image.Width, image.Height,
                                                      PixelFormat.Format8bppIndexed);
                    Grayscale.CommonAlgorithms.BT709.Apply(image, grayImage);
                }
                ProgressBar.Value++;

                // 2 - Edge detection
                DifferenceEdgeDetector edgeDetector = new DifferenceEdgeDetector();
                UnmanagedImage         edgesImage   = edgeDetector.Apply(grayImage);
                ProgressBar.Value++;

                // 3 - Threshold edges
                Threshold thresholdFilter = new Threshold((int)NumericTrashold.Value);
                thresholdFilter.ApplyInPlace(edgesImage);
                ProgressBar.Value++;

                ImgOutput.Image = edgesImage.ToManagedImage();
            }
            catch (Exception exception)
            {
                MessageBox.Show(exception.Message);
            }
        }
        /// <summary>
        /// Set background frame.
        /// </summary>
        ///
        /// <param name="backgroundFrame">Background frame to set.</param>
        ///
        /// <remarks><para>The method sets background frame, which will be used to calculate
        /// difference with.</para></remarks>
        ///
        public void SetBackgroundFrame(UnmanagedImage backgroundFrame)
        {
            // reset motion detection algorithm
            Reset(true);

            lock ( sync )
            {
                // save image dimension
                width  = backgroundFrame.Width;
                height = backgroundFrame.Height;

                // allocate memory for previous and current frames
                this.backgroundFrame = UnmanagedImage.Create(width, height, PixelFormat.Format8bppIndexed);
                frameSize            = this.backgroundFrame.Stride * height;

                // convert source frame to grayscale
                Tools.ConvertToGrayscale(backgroundFrame, this.backgroundFrame);

                manuallySetBackgroundFrame = true;
            }
        }
Example 11
    // Update is called once per frame
    void Update()
    {
        kinect = devOrEmu.getKinect();
        if (kinect.pollColor())
        {
            Color32[] colorKinect = kinect.getColor();

            for (int i = 0; i < 640; i++)
            {
                for (int j = 0; j < 480; j++)
                {
                    int index = i * 480 + j;
                    int a     = colorKinect[index].a;
                    int r     = colorKinect[index].r;
                    int g     = colorKinect[index].g;
                    int b     = colorKinect[index].b;
                    bitmapTest.SetPixel(i, j, System.Drawing.Color.FromArgb(a, r, g, b));
                }
            }

            image = UnmanagedImage.FromManagedImage(bitmapTest);
            //var bitmap = image.ToManagedImage();

            if (image.PixelFormat == System.Drawing.Imaging.PixelFormat.Format8bppIndexed)
            {
                grayImage = image;
            }
            else
            {
                if (grayImage != null)
                {
                    grayImage.Dispose();
                }
                grayImage = UnmanagedImage.Create(image.Width, image.Height,
                                                  System.Drawing.Imaging.PixelFormat.Format8bppIndexed);
                Grayscale.CommonAlgorithms.BT709.Apply(image, grayImage);
            }
            scan_code();
        }
    }
Example 12
        // Create motion zones' image
        private unsafe void CreateMotionZonesFrame()
        {
            lock (sync)
            {
                // free previous motion zones frame
                if (zonesFrame != null)
                {
                    zonesFrame.Dispose();
                    zonesFrame = null;
                }

                // create motion zones frame only if the algorithm has processed at least one frame
                if ((motionZones != null) && (motionZones.Length != 0) && (videoWidth != 0))
                {
                    zonesFrame = UnmanagedImage.Create(videoWidth, videoHeight, PixelFormat.Format8bppIndexed);

                    var imageRect = new Rectangle(0, 0, videoWidth, videoHeight);

                    // draw all motion zones on motion frame
                    foreach (var rect in motionZones)
                    {
                        rect.Intersect(imageRect);

                        // rectangle's dimension
                        var rectWidth  = rect.Width;
                        var rectHeight = rect.Height;

                        // start pointer
                        var stride = zonesFrame.Stride;
                        var ptr    = (byte *)zonesFrame.ImageData.ToPointer() + rect.Y * stride + rect.X;

                        for (var y = 0; y < rectHeight; y++)
                        {
                            AForge.SystemTools.SetUnmanagedMemory(ptr, 255, rectWidth);
                            ptr += stride;
                        }
                    }
                }
            }
        }
Example 13
        private void ProcessFrame()
        {
            _currentPreprocessedImage = _preprocessingFilters.Apply(_currentSourceImage);

            UnmanagedImage sourceCopy = _currentSourceImage.Clone();
            UnmanagedImage output     = UnmanagedImage.Create(sourceCopy.Width, sourceCopy.Height, sourceCopy.PixelFormat);

            if (_previousPreprocessedImage != null)
            {
                //output = _lucasKanade.ShowDerivative(preprocessed, _previous, LucasKanade.DerivativeComponent.X);

                #region Point tracking
                Point centroid = new Point();
                for (int i = 0; i < _trackingPoints.Count; ++i)
                {
                    Point point    = _trackingPoints[i];
                    Point velocity = _tracker.CalculateVelocity(_currentPreprocessedImage, _previousPreprocessedImage, point.Round());

                    UpdatePointPosition(i, velocity);

                    centroid += _trackingPoints[i] / _trackingPoints.Count;

                    Drawing.Rectangle(sourceCopy, new Rectangle((int)point.X - 2, (int)point.Y - 2, 5, 5), Color.Yellow);
                }

                Drawing.Rectangle(sourceCopy, new Rectangle((int)centroid.X - 3, (int)centroid.Y - 3, 7, 7), Color.Red);
                #endregion

                DrawVelocityMap(output);
            }

            _previousPreprocessedImage = _currentPreprocessedImage;

            SourcePictureBox.Image       = sourceCopy.ToManagedImage();
            PreprocessedPictureBox.Image = _currentPreprocessedImage.ToManagedImage();
            if (output != null)
            {
                OutputPictureBox.Image = output.ToManagedImage();
            }
        }
Example 14
        public void ToManagedImageTest(PixelFormat pixelFormat, int x, int y, byte red, byte green, byte blue)
        {
            UnmanagedImage image = UnmanagedImage.Create(320, 240, pixelFormat);

            image.SetPixel(new IntPoint(x, y), Color.FromArgb(255, red, green, blue));

            Bitmap bitmap = image.ToManagedImage();

            // check colors of pixels
            Assert.AreEqual(Color.FromArgb(255, red, green, blue), bitmap.GetPixel(x, y));

            // make sure there is only one active pixel
            UnmanagedImage temp = UnmanagedImage.FromManagedImage(bitmap);

            List <IntPoint> pixels = temp.CollectActivePixels();

            Assert.AreEqual(1, pixels.Count);

            image.Dispose();
            bitmap.Dispose();
            temp.Dispose();
        }
Example 15
        public void Correct(UnmanagedImage img, double aFocalLinPixels, int limit, double scale, int offx, int offy)
        {
            if (Math.Abs(_aFocalLinPixels - aFocalLinPixels) > Double.Epsilon || limit != _mFeLimit ||
                Math.Abs(scale - _mScaleFeSize) > Double.Epsilon || img.Width != _w || img.Height != _h ||
                _offsetx != offx || _offsety != offy)
            {
                Init(aFocalLinPixels, limit, scale, img.Width, img.Height, offx, offy);
            }
            var correctImage = UnmanagedImage.Create(img.Width, img.Height, img.PixelFormat);

            img.Copy(correctImage);
            int c = 0;

            for (int x = 0; x < _w; x++)
            {
                for (int y = 0; y < _h; y++)
                {
                    img.SetPixel(x, y, correctImage.GetPixel(_map[c, 0], _map[c, 1]));
                    c++;
                }
            }
            correctImage.Dispose();
        }
Example 16
        public static UnmanagedImage GenerateBlackWhite(int width,
                                                        int height,
                                                        Func <int, int, bool> predicate,
                                                        PixelFormat format = PixelFormat.Format24bppRgb)
        {
            var n           = (width * height) / 2;
            var whitePixels = new List <IntPoint>(n);
            var blackPixels = new List <IntPoint>((width * height) - n);

            // horizontal stripes
            for (var y = 0; y < height; y++)
            {
                for (var x = 0; x < width; x++)
                {
                    (predicate(x, y) ? whitePixels : blackPixels).Add(new IntPoint(x, y));
                }
            }

            var result = UnmanagedImage.Create(width, height, format);

            result.SetPixels(whitePixels, Color.White);
            result.SetPixels(blackPixels, Color.Black);
            return result;
        }
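Usage sketch: the predicate decides which pixels are white, so stripes and checkerboards are one-liners:

        UnmanagedImage stripes      = GenerateBlackWhite(64, 64, (x, y) => (y / 4) % 2 == 0);
        UnmanagedImage checkerboard = GenerateBlackWhite(64, 64, (x, y) => ((x / 8) + (y / 8)) % 2 == 0);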
Example 17
        public void SetGetPixelGrayscale()
        {
            UnmanagedImage image = UnmanagedImage.Create(320, 240, PixelFormat.Format8bppIndexed);

            image.SetPixel(0, 0, 255);
            image.SetPixel(319, 0, 127);
            image.SetPixel(0, 239, Color.FromArgb(64, 64, 64));

            Color color1 = image.GetPixel(0, 0);
            Color color2 = image.GetPixel(319, 0);
            Color color3 = image.GetPixel(0, 239);

            Assert.AreEqual(255, color1.R);
            Assert.AreEqual(255, color1.G);
            Assert.AreEqual(255, color1.B);

            Assert.AreEqual(127, color2.R);
            Assert.AreEqual(127, color2.G);
            Assert.AreEqual(127, color2.B);

            Assert.AreEqual(64, color3.R);
            Assert.AreEqual(64, color3.G);
            Assert.AreEqual(64, color3.B);
        }
Example 18
        /// <summary>
        /// Process new video frame.
        /// </summary>
        ///
        /// <param name="videoFrame">Video frame to process (detect motion in).</param>
        ///
        /// <remarks><para>Processes new frame from video source and detects motion in it.</para>
        ///
        /// <para>Check <see cref="MotionLevel"/> property to get information about amount of motion
        /// (changes) in the processed frame.</para>
        /// </remarks>
        ///
        public void ProcessFrame(UnmanagedImage videoFrame)
        {
            lock (sync)
            {
                // check previous frame
                if (previousFrame == null)
                {
                    // save image dimension
                    width  = videoFrame.Width;
                    height = videoFrame.Height;

                    // allocate memory for previous and current frames
                    previousFrame = UnmanagedImage.Create(width, height, PixelFormat.Format8bppIndexed);
                    motionFrame   = UnmanagedImage.Create(width, height, PixelFormat.Format8bppIndexed);

                    frameSize = motionFrame.Stride * height;

                    // temporary buffer
                    if (suppressNoise)
                    {
                        tempFrame = UnmanagedImage.Create(width, height, PixelFormat.Format8bppIndexed);
                    }

                    // convert source frame to grayscale
                    Accord.Vision.Tools.ConvertToGrayscale(videoFrame, previousFrame);

                    return;
                }

                // check image dimension
                if ((videoFrame.Width != width) || (videoFrame.Height != height))
                {
                    return;
                }

                // convert current image to grayscale
                Accord.Vision.Tools.ConvertToGrayscale(videoFrame, motionFrame);

                unsafe
                {
                    // pointers to previous and current frames
                    byte *prevFrame = (byte *)previousFrame.ImageData.ToPointer();
                    byte *currFrame = (byte *)motionFrame.ImageData.ToPointer();
                    // difference value
                    int diff;

                    // 1 - get difference between frames
                    // 2 - threshold the difference
                    // 3 - copy current frame to previous frame
                    for (int i = 0; i < frameSize; i++, prevFrame++, currFrame++)
                    {
                        // difference
                        diff = (int)*currFrame - (int)*prevFrame;
                        // copy current frame to previous
                        *prevFrame = *currFrame;
                        // threshold
                        *currFrame = ((diff >= differenceThreshold) || (diff <= differenceThresholdNeg)) ? (byte)255 : (byte)0;
                    }

                    if (suppressNoise)
                    {
                        // suppress noise and calculate motion amount
                        Accord.SystemTools.CopyUnmanagedMemory(tempFrame.ImageData, motionFrame.ImageData, frameSize);
                        erosionFilter.Apply(tempFrame, motionFrame);
                    }

                    // calculate amount of motion pixels
                    pixelsChanged = 0;
                    byte *motion = (byte *)motionFrame.ImageData.ToPointer();

                    for (int i = 0; i < frameSize; i++, motion++)
                    {
                        pixelsChanged += (*motion & 1);
                    }
                }
            }
        }
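A driving sketch: the first call only primes the previous-frame buffer, and later calls produce the motion mask (detector owns the ProcessFrame above; grabFrame is an assumed source returning UnmanagedImage):

        detector.ProcessFrame(grabFrame()); // primes previousFrame; no motion reported yet
        detector.ProcessFrame(grabFrame()); // motionFrame now holds the thresholded difference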
Example 19
        /// <summary>
        /// Process new video frame.
        /// </summary>
        ///
        /// <param name="videoFrame">Video frame to process (detect motion in).</param>
        ///
        /// <remarks><para>Processes new frame from video source and detects motion in it.</para>
        ///
        /// <para>Check <see cref="MotionLevel"/> property to get information about amount of motion
        /// (changes) in the processed frame.</para>
        /// </remarks>
        ///
        public unsafe void ProcessFrame(UnmanagedImage videoFrame)
        {
            lock ( _sync )
            {
                // check previous frame
                if (_previousFrame == null)
                {
                    // save image dimension
                    _width  = videoFrame.Width;
                    _height = videoFrame.Height;

                    // allocate memory for previous and current frames
                    _previousFrame = UnmanagedImage.Create(_width, _height, videoFrame.PixelFormat);
                    _motionFrame   = UnmanagedImage.Create(_width, _height, PixelFormat.Format8bppIndexed);
                    _motionSize    = _motionFrame.Stride * _height;

                    // temporary buffer
                    if (_suppressNoise)
                    {
                        _tempFrame = UnmanagedImage.Create(_width, _height, PixelFormat.Format8bppIndexed);
                    }

                    // copy source frame
                    videoFrame.Copy(_previousFrame);

                    return;
                }

                // check image dimension
                if ((videoFrame.Width != _width) || (videoFrame.Height != _height))
                {
                    return;
                }


                // pointers to previous and current frames
                byte *prevFrame     = (byte *)_previousFrame.ImageData.ToPointer( );
                byte *currFrame     = (byte *)videoFrame.ImageData.ToPointer( );
                byte *motion        = (byte *)_motionFrame.ImageData.ToPointer( );
                int   bytesPerPixel = Tools.BytesPerPixel(videoFrame.PixelFormat);

                // difference value

                // 1 - get difference between frames
                // 2 - threshold the difference (accumulated over all channels)
                // 3 - copy current frame to previous frame
                for (int i = 0; i < _height; i++)
                {
                    var currFrameLocal = currFrame;
                    var prevFrameLocal = prevFrame;
                    var motionLocal    = motion;
                    for (int j = 0; j < _width; j++)
                    {
                        var diff = 0;
                        for (int nbBytes = 0; nbBytes < bytesPerPixel; nbBytes++)
                        {
                            // difference
                            diff += Math.Abs(*currFrameLocal - *prevFrameLocal);
                            // copy current frame to previous
                            *prevFrameLocal = *currFrameLocal;
                            currFrameLocal++;
                            prevFrameLocal++;
                        }
                        diff /= bytesPerPixel;
                        // threshold
                        *motionLocal = (diff >= _differenceThreshold) ? (byte)255 : (byte)0;
                        motionLocal++;
                    }
                    currFrame += videoFrame.Stride;
                    prevFrame += _previousFrame.Stride;
                    motion    += _motionFrame.Stride;
                }

                if (_suppressNoise)
                {
                    // suppress noise and calculate motion amount
                    Accord.SystemTools.CopyUnmanagedMemory(_tempFrame.ImageData, _motionFrame.ImageData, _motionSize);
                    _erosionFilter.Apply(_tempFrame, _motionFrame);
                }

                // calculate amount of motion pixels
                _pixelsChanged = 0;
                motion         = (byte *)_motionFrame.ImageData.ToPointer( );

                for (int i = 0; i < _motionSize; i++, motion++)
                {
                    _pixelsChanged += (*motion & 1);
                }
            }
        }
        public List <IntPoint> ProcessImage(UnmanagedImage image)
        {
            // check image format
            if (
                (image.PixelFormat != PixelFormat.Format8bppIndexed) &&
                (image.PixelFormat != PixelFormat.Format24bppRgb) &&
                (image.PixelFormat != PixelFormat.Format32bppRgb) &&
                (image.PixelFormat != PixelFormat.Format32bppArgb)
                )
            {
                throw new UnsupportedImageFormatException("Unsupported pixel format of the source image.");
            }

            // make sure we have grayscale image
            UnmanagedImage grayImage = null;

            grayImage = image.PixelFormat == PixelFormat.Format8bppIndexed ? image : Grayscale.CommonAlgorithms.BT709.Apply(image);


            // get source image size
            int width  = grayImage.Width;
            int height = grayImage.Height;
            int stride = grayImage.Stride;
            int offset = stride - width;



            // 1. Calculate partial differences
            UnmanagedImage diffx  = UnmanagedImage.Create(width, height, PixelFormat.Format8bppIndexed);
            UnmanagedImage diffy  = UnmanagedImage.Create(width, height, PixelFormat.Format8bppIndexed);
            UnmanagedImage diffxy = UnmanagedImage.Create(width, height, PixelFormat.Format8bppIndexed);

            unsafe
            {
                // Compute dx and dy
                byte *src = (byte *)grayImage.ImageData.ToPointer();
                byte *dx  = (byte *)diffx.ImageData.ToPointer();
                byte *dy  = (byte *)diffy.ImageData.ToPointer();

                // for each line
                for (int y = 0; y < height; y++)
                {
                    // for each pixel
                    for (int x = 0; x < width; x++, src++, dx++, dy++)
                    {
                        // TODO: Place those verifications
                        // outside the innermost loop
                        if (x == 0 || x == width - 1 ||
                            y == 0 || y == height - 1)
                        {
                            *dx = *dy = 0; continue;
                        }

                        int h = -(src[-stride - 1] + src[-1] + src[stride - 1]) +
                                (src[-stride + 1] + src[+1] + src[stride + 1]);
                        *dx = (byte)(h > 255 ? 255 : h < 0 ? 0 : h);

                        int v = -(src[-stride - 1] + src[-stride] + src[-stride + 1]) +
                                (src[+stride - 1] + src[+stride] + src[+stride + 1]);
                        *dy = (byte)(v > 255 ? 255 : v < 0 ? 0 : v);
                    }
                    src += offset;
                    dx  += offset;
                    dy  += offset;
                }


                // Compute dxy
                dx = (byte *)diffx.ImageData.ToPointer();
                var dxy = (byte *)diffxy.ImageData.ToPointer();

                // for each line
                for (int y = 0; y < height; y++)
                {
                    // for each pixel
                    for (int x = 0; x < width; x++, dx++, dxy++)
                    {
                        if (x == 0 || x == width - 1 ||
                            y == 0 || y == height - 1)
                        {
                            *dxy = 0; continue;
                        }

                        int v = -(dx[-stride - 1] + dx[-stride] + dx[-stride + 1]) +
                                (dx[+stride - 1] + dx[+stride] + dx[+stride + 1]);
                        *dxy = (byte)(v > 255 ? 255 : v < 0 ? 0 : v);
                    }
                    dx  += offset;
                    dxy += offset;
                }
            }


            // 2. Smooth the diff images
            if (_sigma > 0.0)
            {
                GaussianBlur blur = new GaussianBlur(_sigma);
                blur.ApplyInPlace(diffx);
                blur.ApplyInPlace(diffy);
                blur.ApplyInPlace(diffxy);
            }


            // 3. Compute Harris Corner Response
            float[,] H = new float[height, width];

            unsafe
            {
                byte *ptrA = (byte *)diffx.ImageData.ToPointer();
                byte *ptrB = (byte *)diffy.ImageData.ToPointer();
                byte *ptrC = (byte *)diffxy.ImageData.ToPointer();
                float M, A, B, C;

                for (int y = 0; y < height; y++)
                {
                    for (int x = 0; x < width; x++)
                    {
                        A = *(ptrA++);
                        B = *(ptrB++);
                        C = *(ptrC++);

                        // Harris corner measure
                        M = (A * B - C * C) - (_k * ((A + B) * (A + B)));

                        if (M > _threshold)
                        {
                            H[y, x] = M;
                        }
                        else
                        {
                            H[y, x] = 0;
                        }
                    }

                    ptrA += offset;
                    ptrB += offset;
                    ptrC += offset;
                }
            }


            // Free resources
            diffx.Dispose();
            diffy.Dispose();
            diffxy.Dispose();

            if (image.PixelFormat != PixelFormat.Format8bppIndexed)
            {
                grayImage.Dispose();
            }


            // 4. Suppress non-maximum points
            List <IntPoint> cornersList = new List <IntPoint>();

            // for each row
            for (int y = _r, maxY = height - _r; y < maxY; y++)
            {
                // for each pixel
                for (int x = _r, maxX = width - _r; x < maxX; x++)
                {
                    float currentValue = H[y, x];

                    // for each windows' row
                    for (int i = -_r; (currentValue != 0) && (i <= _r); i++)
                    {
                        // for each windows' pixel
                        for (int j = -_r; j <= _r; j++)
                        {
                            if (H[y + i, x + j] > currentValue)
                            {
                                currentValue = 0;
                                break;
                            }
                        }
                    }

                    // check if this point is really interesting
                    if (currentValue != 0)
                    {
                        cornersList.Add(new IntPoint(x, y));
                    }
                }
            }


            return cornersList;
        }
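A usage sketch; the containing class is not shown, so HarrisCornersDetector is an assumed name for it:

        UnmanagedImage input = UnmanagedImage.FromManagedImage(bitmap);
        HarrisCornersDetector detector = new HarrisCornersDetector();
        List<IntPoint> corners = detector.ProcessImage(input);
        foreach (IntPoint corner in corners)
        {
            Drawing.FillRectangle(input, new Rectangle(corner.X - 1, corner.Y - 1, 3, 3), Color.Red);
        }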
Example 21
        protected override UnmanagedImage GetImage(int nr)
        {
            CheckImages();
            var fft   = new ComplexImage(F1X, F1Y);
            var w     = fft.Width;
            var data1 = fft.Channel0;
            var data2 = fft.Channel1;
            var data3 = fft.Channel2;
            var r     = _fftLightField.R;
            var g     = _fftLightField.G;
            var b     = _fftLightField.B;

            var mHalf     = _m >> 1;
            var nHalf     = _n >> 1;
            var theta1ByM = (nr - CAngles) / _m;
            var theta2ByN = (nr - CAngles) / _n;

            // TODO: optimize
            var dd  = (-mHalf * theta1ByM) + CAngles;
            var ccc = (-nHalf * theta2ByN) + CAngles;

            for (var y = 0; y < F1Y; y++)
            {
                var cc = ccc;
                for (int x = 0,
                     i = y * w;
                     x < F1X;
                     x++, i++)
                {
                    data1[i] = r[x, y][cc, dd];
                    data2[i] = g[x, y][cc, dd];
                    data3[i] = b[x, y][cc, dd];
                    cc      += theta2ByN;
                }

                dd += theta1ByM;
            }

            fft.InverseFFT2(false);

            var min = double.MaxValue;
            var max = double.MinValue;

            fft.ForEachChannel(
                c => {
                var val = c.Real;
                if (val < min)
                {
                    min = val;
                }

                if (val > max)
                {
                    max = val;
                }
            }
                );

            var invdist = byte.MaxValue / (max - min);

            data1 = fft.Channel0;
            data2 = fft.Channel1;
            data3 = fft.Channel2;

            var dst = UnmanagedImage.Create(F1X, F1Y, fft.PixelFormat);

            for (var y = 0; y < F1Y; y++)
            {
                for (int x = 0,
                     i = y * w;
                     x < F1X;
                     x++, i++)
                {
                    dst.SetPixel(
                        x,
                        y,
                        Color.FromArgb(
                            (int)((data1[i].Real - min) * invdist),
                            (int)((data2[i].Real - min) * invdist),
                            (int)((data3[i].Real - min) * invdist)
                            )
                        );
                }
            }

            return dst;
        }
        /// <summary>
        /// Process new video frame.
        /// </summary>
        ///
        /// <param name="videoFrame">Video frame to process (detect motion in).</param>
        ///
        /// <remarks><para>Processes new frame from video source and detects motion in it.</para>
        ///
        /// <para>Check <see cref="MotionLevel"/> property to get information about amount of motion
        /// (changes) in the processed frame.</para>
        /// </remarks>
        ///
        public unsafe void ProcessFrame(UnmanagedImage videoFrame)
        {
            lock ( sync )
            {
                // check background frame
                if (backgroundFrame == null)
                {
                    // save image dimension
                    width  = videoFrame.Width;
                    height = videoFrame.Height;

                    // allocate memory for background frame
                    backgroundFrame = UnmanagedImage.Create(width, height, PixelFormat.Format8bppIndexed);
                    frameSize       = backgroundFrame.Stride * height;

                    // convert source frame to grayscale
                    Tools.ConvertToGrayscale(videoFrame, backgroundFrame);

                    return;
                }

                // check image dimension
                if ((videoFrame.Width != width) || (videoFrame.Height != height))
                {
                    return;
                }

                // check motion frame
                if (motionFrame == null)
                {
                    motionFrame = UnmanagedImage.Create(width, height, PixelFormat.Format8bppIndexed);

                    // temporary buffer
                    if (suppressNoise)
                    {
                        tempFrame = UnmanagedImage.Create(width, height, PixelFormat.Format8bppIndexed);
                    }
                }

                // convert current image to grayscale
                Tools.ConvertToGrayscale(videoFrame, motionFrame);

                // pointers to background and current frames
                byte *backFrame;
                byte *currFrame;
                int   diff;

                backFrame = (byte *)backgroundFrame.ImageData.ToPointer( );
                currFrame = (byte *)motionFrame.ImageData.ToPointer( );

                // 1 - get difference between frames
                // 2 - threshold the difference
                for (int i = 0; i < frameSize; i++, backFrame++, currFrame++)
                {
                    // difference
                    diff = (int)*currFrame - (int)*backFrame;
                    // threshold
                    *currFrame = ((diff >= differenceThreshold) || (diff <= differenceThresholdNeg)) ? (byte)255 : (byte)0;
                }

                if (suppressNoise)
                {
                    // suppress noise and calculate motion amount
                    AForge.SystemTools.CopyUnmanagedMemory(tempFrame.ImageData, motionFrame.ImageData, frameSize);
                    erosionFilter.Apply(tempFrame, motionFrame);

                    if (keepObjectEdges)
                    {
                        AForge.SystemTools.CopyUnmanagedMemory(tempFrame.ImageData, motionFrame.ImageData, frameSize);
                        dilatationFilter.Apply(tempFrame, motionFrame);
                    }
                }

                // calculate amount of motion pixels
                pixelsChanged = 0;
                byte *motion = (byte *)motionFrame.ImageData.ToPointer( );

                for (int i = 0; i < frameSize; i++, motion++)
                {
                    pixelsChanged += (*motion & 1);
                }
            }
        }
Example 23
        public void Collect8bppPixelValuesTest_RGB()
        {
            // create 24bpp color image
            UnmanagedImage image = UnmanagedImage.Create(320, 240, PixelFormat.Format24bppRgb);

            // draw vertical and horizontal lines
            Drawing.Line(image, new IntPoint(10, 10), new IntPoint(20, 10), Color.FromArgb(128, 129, 130));
            Drawing.Line(image, new IntPoint(20, 20), new IntPoint(20, 30), Color.FromArgb(64, 65, 66));

            // prepare lists with coordinates
            List <IntPoint> horizontal  = new List <IntPoint>();
            List <IntPoint> horizontalU = new List <IntPoint>();
            List <IntPoint> horizontalD = new List <IntPoint>();

            for (int x = 10; x <= 20; x++)
            {
                horizontal.Add(new IntPoint(x, 10));  // on the line
                horizontalU.Add(new IntPoint(x, 9));  // above
                horizontalD.Add(new IntPoint(x, 11)); // below
            }

            List <IntPoint> vertical  = new List <IntPoint>();
            List <IntPoint> verticalL = new List <IntPoint>();
            List <IntPoint> verticalR = new List <IntPoint>();

            for (int y = 20; y <= 30; y++)
            {
                vertical.Add(new IntPoint(20, y));    // on the line
                verticalL.Add(new IntPoint(19, y));   // left
                verticalR.Add(new IntPoint(21, y));   // right
            }

            // collect all pixel's values
            byte[] horizontalValues  = image.Collect8bppPixelValues(horizontal);
            byte[] horizontalUValues = image.Collect8bppPixelValues(horizontalU);
            byte[] horizontalDValues = image.Collect8bppPixelValues(horizontalD);
            byte[] verticalValues    = image.Collect8bppPixelValues(vertical);
            byte[] verticalLValues   = image.Collect8bppPixelValues(verticalL);
            byte[] verticalRValues   = image.Collect8bppPixelValues(verticalR);

            Assert.AreEqual(horizontal.Count * 3, horizontalValues.Length);
            Assert.AreEqual(vertical.Count * 3, verticalValues.Length);

            // check all pixel values
            for (int i = 0, n = horizontalValues.Length; i < n; i += 3)
            {
                Assert.AreEqual(128, horizontalValues[i]);
                Assert.AreEqual(129, horizontalValues[i + 1]);
                Assert.AreEqual(130, horizontalValues[i + 2]);

                Assert.AreEqual(0, horizontalUValues[i]);
                Assert.AreEqual(0, horizontalUValues[i + 1]);
                Assert.AreEqual(0, horizontalUValues[i + 2]);

                Assert.AreEqual(0, horizontalDValues[i]);
                Assert.AreEqual(0, horizontalDValues[i + 1]);
                Assert.AreEqual(0, horizontalDValues[i + 2]);
            }

            for (int i = 0, n = verticalValues.Length; i < n; i += 3)
            {
                Assert.AreEqual(64, verticalValues[i]);
                Assert.AreEqual(65, verticalValues[i + 1]);
                Assert.AreEqual(66, verticalValues[i + 2]);

                Assert.AreEqual(0, verticalLValues[i]);
                Assert.AreEqual(0, verticalLValues[i + 1]);
                Assert.AreEqual(0, verticalLValues[i + 2]);

                Assert.AreEqual(0, verticalRValues[i]);
                Assert.AreEqual(0, verticalRValues[i + 1]);
                Assert.AreEqual(0, verticalRValues[i + 2]);
            }
        }
Example 24
        public void gh_784()
        {
            Accord.Math.Random.Generator.Seed = 0;
            var rnd = Accord.Math.Random.Generator.Random;

            // This is the same example found in Wikipedia page on
            // k-d trees: http://en.wikipedia.org/wiki/K-d_tree

            var image = UnmanagedImage.Create(800, 600, PixelFormat.Format24bppRgb);

            // Suppose we have the following set of points:
            var points     = new double[300000][];
            var listPoints = new List <IntPoint>(points.Length);

            for (int i = 0; i < points.Length; i++)
            {
                var point = new IntPoint(rnd.Next(0, image.Width), rnd.Next(0, image.Height));

                points[i] = new double[] { point.X, point.Y };

                listPoints.Add(point);
            }

            var region = new Rectangle(676, 441, 70, 55);

            var sw1 = new Stopwatch();

            sw1.Restart();
            var query1 = listPoints.FindAll((obj) =>
            {
                return obj.X > region.Left && obj.X < region.Right && obj.Y > region.Top && obj.Y < region.Bottom;
            });

            sw1.Stop();

            listPoints.Clear();

            var sw2 = new Stopwatch();

            sw2.Restart();
            // To create a tree from a set of points, we can use
            var tree = KDTree.FromData <int>(points, new Accord.Math.Distances.Manhattan(), inPlace: true);

            sw2.Stop();

            var sw3 = new Stopwatch();

            sw3.Restart();
            var actual = tree.GetNodesInsideRegion(region.ToHyperrectangle()).Apply(x => new IntPoint((int)x.Position[0], (int)x.Position[1]));

            sw3.Stop();


            var sw4 = new Stopwatch();

            sw4.Restart();
            var expected = QueryKDTree2D(tree.Root, region);

            sw4.Stop();

            Assert.AreEqual(actual, expected);
        }
Example 25
        public void Detect(ref Bitmap image)
        {
            List <List <IntPoint> > markers = new List <List <IntPoint> >();
            Bitmap tmp = image;

            BitmapData     bitmapData     = image.LockBits(new Rectangle(0, 0, image.Width, image.Height), ImageLockMode.ReadOnly, image.PixelFormat);
            UnmanagedImage unmanagedImage = new UnmanagedImage(bitmapData);

            UnmanagedImage grayImage = UnmanagedImage.Create(unmanagedImage.Width, unmanagedImage.Height, PixelFormat.Format8bppIndexed);

            Grayscale.CommonAlgorithms.BT709.Apply(unmanagedImage, grayImage);

            DifferenceEdgeDetector edgeDetector = new DifferenceEdgeDetector();
            UnmanagedImage         edgesImage   = edgeDetector.Apply(grayImage);

            image.UnlockBits(bitmapData);

            if (this.edgeImage.Checked)
            {
                tmp = edgesImage.ToManagedImage().Clone(new Rectangle(0, 0, edgesImage.Width, edgesImage.Height), PixelFormat.Format24bppRgb);
            }

            Threshold thresholdFilter = new Threshold(this.binThreshold);

            thresholdFilter.ApplyInPlace(edgesImage);

            if (this.thresholdEdgeImage.Checked)
            {
                tmp = edgesImage.ToManagedImage().Clone(new Rectangle(0, 0, edgesImage.Width, edgesImage.Height), PixelFormat.Format24bppRgb);
            }

            this.blobCounter.ProcessImage(edgesImage);
            Blob[] blobs = blobCounter.GetObjectsInformation();

            for (int i = 0, n = blobs.Length; i < n; i++)
            {
                List <IntPoint> edgePoints = blobCounter.GetBlobsEdgePoints(blobs[i]);
                List <IntPoint> corners    = null;


                if (this.isSquare(edgePoints, out corners))
                {
                    List <IntPoint> leftEdgePoints, rightEdgePoints;
                    blobCounter.GetBlobsLeftAndRightEdges(blobs[i],
                                                          out leftEdgePoints, out rightEdgePoints);

                    float diff = calculateAverageEdgesBrightnessDifference(
                        leftEdgePoints, rightEdgePoints, grayImage);

                    if (diff > 50)
                    {
                        markers.Add(corners);
                    }
                }
            }

            foreach (List <IntPoint> marker in markers)
            {
                Color    markerColor;
                IntPoint markerOrientation = this.markerOrientation(image, marker, out markerColor);
                IntPoint center            = marker[2] - marker[0];
                center.X = marker[0].X + Convert.ToInt32(center.X * 0.5);
                center.Y = marker[0].Y + Convert.ToInt32(center.Y * 0.5);

                if (this.drawMarkersOnVideo.Checked)
                {
                    if ((this.edgeImage.Checked) || (this.thresholdEdgeImage.Checked))
                    {
                        this.drawMarker(tmp, marker, markerOrientation, markerColor);
                    }
                    else
                    {
                        this.drawMarker(image, marker, markerOrientation, markerColor);
                    }
                }
                ColorDiscriminator discriminator = new ColorDiscriminator();
                discriminator.Color = markerColor;

                LocationSourceManager.Instance.updateLocationSource(discriminator, center);
            }
            image = tmp;
        }
Example 26
        /// <summary>
        /// Search for glyphs in the specified image and recognize them.
        /// </summary>
        ///
        /// <param name="image">Image to search glyphs in.</param>
        ///
        /// <returns>Return a list of found glyphs.</returns>
        ///
        /// <remarks><para>The method does processing of the specified image and searches for glyphs in it of
        /// the specified <see cref="GlyphSize">size</see>. In the case if <see cref="GlyphDatabase">glyphs' database</see>
        /// is set, it tries to find a matching glyph in it for each found glyph in the image. If matching is found,
        /// then <see cref="ExtractedGlyphData.RecognizedGlyph">RecognizedGlyph</see> and
        /// <see cref="ExtractedGlyphData.RecognizedQuadrilateral">RecognizedQuadrilateral</see>
        /// properties of <see cref="ExtractedGlyphData"/> are set correspondingly.</para></remarks>
        ///
        /// <exception cref="UnsupportedImageFormatException">Pixel format of the specified image is not supported.
        /// It must be 8 bpp indexed or 24/32 bpp color image.</exception>
        ///
        public List <ExtractedGlyphData> FindGlyphs(UnmanagedImage image)
        {
            List <ExtractedGlyphData> extractedGlyphs = new List <ExtractedGlyphData>( );

            if ((image.PixelFormat != PixelFormat.Format8bppIndexed) &&
                (!Grayscale.CommonAlgorithms.BT709.FormatTranslations.ContainsKey(image.PixelFormat)))
            {
                throw new UnsupportedImageFormatException("Pixel format of the specified image is not supported.");
            }

            // 1 - grayscaling
            UnmanagedImage grayImage = null;

            if (image.PixelFormat == PixelFormat.Format8bppIndexed)
            {
                grayImage = image;
            }
            else
            {
                grayImage = UnmanagedImage.Create(image.Width, image.Height, PixelFormat.Format8bppIndexed);
                Grayscale.CommonAlgorithms.BT709.Apply(image, grayImage);
            }

            // 2 - Edge detection
            UnmanagedImage edgesImage = edgeDetector.Apply(grayImage);

            // 3 - Threshold edges
            thresholdFilter.ApplyInPlace(edgesImage);

            // 4 - Blob Counter
            blobCounter.ProcessImage(edgesImage);
            Blob[] blobs = blobCounter.GetObjectsInformation( );

            // 5 - check each blob
            for (int i = 0, n = blobs.Length; i < n; i++)
            {
                List <IntPoint> edgePoints = blobCounter.GetBlobsEdgePoints(blobs[i]);
                List <IntPoint> corners    = null;

                // does it look like a quadrilateral ?
                if (shapeChecker.IsQuadrilateral(edgePoints, out corners))
                {
                    // get edge points on the left and on the right side
                    List <IntPoint> leftEdgePoints, rightEdgePoints;
                    blobCounter.GetBlobsLeftAndRightEdges(blobs[i], out leftEdgePoints, out rightEdgePoints);

                    // calculate average difference between pixel values from outside of the shape and from inside
                    float diff = CalculateAverageEdgesBrightnessDifference(
                        leftEdgePoints, rightEdgePoints, grayImage);

                    // check the average difference, which tells how much lighter the outside is than the inside on average
                    if (diff > 20)
                    {
                        // perform glyph recognition
                        ExtractedGlyphData glyphData = RecognizeGlyph(grayImage, corners);

                        if (glyphData != null)
                        {
                            extractedGlyphs.Add(glyphData);

                            if (extractedGlyphs.Count >= maxNumberOfGlyphsToSearch)
                            {
                                break;
                            }
                        }
                    }
                }
            }

            // dispose resources
            if (image.PixelFormat != PixelFormat.Format8bppIndexed)
            {
                grayImage.Dispose( );
            }
            edgesImage.Dispose( );

            return(extractedGlyphs);
        }
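A minimal usage sketch for FindGlyphs; the recognizer instance, the source bitmap, and the glyph's Name property are assumptions about the caller's context, not part of the code above:

        // hedged usage sketch: `recognizer` exposes FindGlyphs, `bitmap` is a 24 bpp Bitmap
        UnmanagedImage frame = UnmanagedImage.FromManagedImage(bitmap);

        List<ExtractedGlyphData> glyphs = recognizer.FindGlyphs(frame);

        foreach (ExtractedGlyphData glyph in glyphs)
        {
            // RecognizedGlyph stays null when no database match was found
            if (glyph.RecognizedGlyph != null)
            {
                Console.WriteLine("recognized: " + glyph.RecognizedGlyph.Name);
            }
        }

        frame.Dispose();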
Example 27
        /// <summary>
        /// Process new video frame.
        /// </summary>
        ///
        /// <param name="videoFrame">Video frame to process (detect motion in).</param>
        ///
        /// <remarks><para>Processes new frame from video source and detects motion in it.</para>
        ///
        /// <para>Check <see cref="MotionLevel"/> property to get information about amount of motion
        /// (changes) in the processed frame.</para>
        /// </remarks>
        ///
        public unsafe void ProcessFrame(UnmanagedImage videoFrame)
        {
            lock ( _sync )
            {
                // check background frame
                if (_backgroundFrame == null)
                {
                    _lastTimeMeasurment = DateTime.Now;

                    // save image dimension
                    _width  = videoFrame.Width;
                    _height = videoFrame.Height;

                    // allocate memory for background and motion frames
                    _backgroundFrame = UnmanagedImage.Create(_width, _height, PixelFormat.Format8bppIndexed);
                    _motionFrame     = UnmanagedImage.Create(_width, _height, PixelFormat.Format8bppIndexed);

                    _frameSize = _motionFrame.Stride * _height;

                    // temporary buffer
                    if (_suppressNoise)
                    {
                        _tempFrame = UnmanagedImage.Create(_width, _height, PixelFormat.Format8bppIndexed);
                    }

                    // convert source frame to grayscale
                    Tools.ConvertToGrayscale(videoFrame, _backgroundFrame);

                    return;
                }

                // check image dimension
                if ((videoFrame.Width != _width) || (videoFrame.Height != _height))
                {
                    return;
                }

                // convert current image to grayscale
                Tools.ConvertToGrayscale(videoFrame, _motionFrame);

                // pointers to background and current frames
                byte *backFrame;
                byte *currFrame;
                int   diff;

                // update background frame
                if (_millisecondsPerBackgroundUpdate == 0)
                {
                    // update background frame using frame counter as a base
                    if (++_framesCounter == _framesPerBackgroundUpdate)
                    {
                        _framesCounter = 0;

                        backFrame = (byte *)_backgroundFrame.ImageData.ToPointer( );
                        currFrame = (byte *)_motionFrame.ImageData.ToPointer( );

                        for (int i = 0; i < _frameSize; i++, backFrame++, currFrame++)
                        {
                            diff = *currFrame - *backFrame;
                            if (diff > 0)
                            {
                                (*backFrame)++;
                            }
                            else if (diff < 0)
                            {
                                (*backFrame)--;
                            }
                        }
                    }
                }
                else
                {
                    // update background frame using timer as a base

                    // get current time and calculate difference
                    DateTime currentTime = DateTime.Now;
                    TimeSpan timeDiff    = currentTime - _lastTimeMeasurment;
                    // save current time as the last measurement
                    _lastTimeMeasurment = currentTime;

                    int milliseconds = (int)timeDiff.TotalMilliseconds + _millisecondsLeftUnprocessed;

                    // save the remainder so it can be taken into account next time
                    _millisecondsLeftUnprocessed = milliseconds % _millisecondsPerBackgroundUpdate;
                    // get the amount for the background update
                    int updateAmount = milliseconds / _millisecondsPerBackgroundUpdate;

                    backFrame = (byte *)_backgroundFrame.ImageData.ToPointer( );
                    currFrame = (byte *)_motionFrame.ImageData.ToPointer( );

                    for (int i = 0; i < _frameSize; i++, backFrame++, currFrame++)
                    {
                        diff = *currFrame - *backFrame;
                        if (diff > 0)
                        {
                            (*backFrame) += (byte)((diff < updateAmount) ? diff :  updateAmount);
                        }
                        else if (diff < 0)
                        {
                            (*backFrame) += (byte)((-diff < updateAmount) ? diff : -updateAmount);
                        }
                    }
                }

                backFrame = (byte *)_backgroundFrame.ImageData.ToPointer( );
                currFrame = (byte *)_motionFrame.ImageData.ToPointer( );

                // 1 - get difference between frames
                // 2 - threshold the difference
                for (int i = 0; i < _frameSize; i++, backFrame++, currFrame++)
                {
                    // difference
                    diff = *currFrame - *backFrame;
                    // threshold
                    *currFrame = ((diff >= _differenceThreshold) || (diff <= _differenceThresholdNeg)) ? (byte)255 : (byte)0;
                }

                if (_suppressNoise)
                {
                    // suppress noise and calculate motion amount
                    AForge.SystemTools.CopyUnmanagedMemory(_tempFrame.ImageData, _motionFrame.ImageData, _frameSize);
                    _erosionFilter.Apply(_tempFrame, _motionFrame);

                    if (_keepObjectEdges)
                    {
                        AForge.SystemTools.CopyUnmanagedMemory(_tempFrame.ImageData, _motionFrame.ImageData, _frameSize);
                        _dilatationFilter.Apply(_tempFrame, _motionFrame);
                    }
                }

                // calculate amount of motion pixels
                _pixelsChanged = 0;
                byte *motion = (byte *)_motionFrame.ImageData.ToPointer( );

                for (int i = 0; i < _frameSize; i++, motion++)
                {
                    _pixelsChanged += (*motion & 1);
                }
            }
        }
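The remainder bookkeeping in the timer-based branch above is easiest to follow with concrete numbers; this sketch uses illustrative values only:

        // worked example of the timer-based update arithmetic (values are illustrative)
        int millisecondsPerUpdate = 10;  // _millisecondsPerBackgroundUpdate
        int elapsed               = 37;  // ms since the previous frame
        int leftOver              = 5;   // _millisecondsLeftUnprocessed from the last call

        int total        = elapsed + leftOver;             // 42
        int carry        = total % millisecondsPerUpdate;  // 2 -> carried to the next call
        int updateAmount = total / millisecondsPerUpdate;  // 4 -> max per-pixel background step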
Example 28
        /// <summary>
        /// Process new video frame.
        /// </summary>
        ///
        /// <param name="videoFrame">Video frame to process (detect motion in).</param>
        ///
        /// <remarks><para>Processes new frame from video source and detects motion in it.</para>
        ///
        /// <para>Check <see cref="MotionLevel"/> property to get information about amount of motion
        /// (changes) in the processed frame.</para>
        /// </remarks>
        ///
        public unsafe void ProcessFrame(UnmanagedImage videoFrame)
        {
            lock ( _sync )
            {
                // check previous frame
                if (_previousFrame == null)
                {
                    // save image dimension
                    _width  = videoFrame.Width;
                    _height = videoFrame.Height;

                    // allocate memory for previous and motion frames
                    _previousFrame = UnmanagedImage.Create(_width, _height, PixelFormat.Format8bppIndexed);
                    _motionFrame   = UnmanagedImage.Create(_width, _height, PixelFormat.Format8bppIndexed);

                    _frameSize = _motionFrame.Stride * _height;

                    // temporary buffer
                    if (_suppressNoise)
                    {
                        _tempFrame = UnmanagedImage.Create(_width, _height, PixelFormat.Format8bppIndexed);
                    }

                    // convert source frame to grayscale
                    Tools.ConvertToGrayscale(videoFrame, _previousFrame);

                    return;
                }

                // check image dimension
                if ((videoFrame.Width != _width) || (videoFrame.Height != _height))
                {
                    return;
                }

                // convert current image to grayscale
                Tools.ConvertToGrayscale(videoFrame, _motionFrame);

                // pointers to previous and current frames
                byte *prevFrame = (byte *)_previousFrame.ImageData.ToPointer( );
                byte *currFrame = (byte *)_motionFrame.ImageData.ToPointer( );

                // 1 - get difference between frames
                // 2 - threshold the difference
                // 3 - copy current frame to previous frame
                for (int i = 0; i < _frameSize; i++, prevFrame++, currFrame++)
                {
                    // difference
                    var diff = *currFrame - *prevFrame;
                    // copy current frame to previous
                    *prevFrame = *currFrame;
                    // threshold
                    *currFrame = ((diff >= _differenceThreshold) || (diff <= _differenceThresholdNeg)) ? (byte)255 : (byte)0;
                }

                if (_suppressNoise)
                {
                    // suppress noise and calculate motion amount
                    AForge.SystemTools.CopyUnmanagedMemory(_tempFrame.ImageData, _motionFrame.ImageData, _frameSize);
                    _erosionFilter.Apply(_tempFrame, _motionFrame);
                }

                // calculate amount of motion pixels
                _pixelsChanged = 0;
                byte *motion = (byte *)_motionFrame.ImageData.ToPointer( );

                for (int i = 0; i < _frameSize; i++, motion++)
                {
                    _pixelsChanged += (*motion & 1);
                }
            }
        }
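The remarks above reference a MotionLevel property; a minimal sketch of how it could be derived from the counters this method maintains (the property itself is an assumption, not shown in the code above):

        // hedged sketch: motion level as the fraction of changed pixels
        public float MotionLevel
        {
            get { return (_width == 0) ? 0f : (float)_pixelsChanged / (_width * _height); }
        }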
Example 29
        /// <summary>
        ///   Codes an image using the light field, based on the given parameters.
        /// </summary>
        /// <param name="parameter">
        ///   <see cref="LightfieldParam" /> describing the view point, depth of field and depth of focus.
        /// </param>
        /// <returns>The synthesized image, or <c>null</c> if a required view is
        ///   unavailable or processing was cancelled.</returns>
        public unsafe UnmanagedImage GetUnmanagedImage(LightfieldParam parameter)
        {
            var lightFieldSize = ViewRows * ViewColumns;

            if (lightFieldSize == 0)
            {
                return(null);
            }

            var doField    = Math.Min(MaximumDepthOfField(parameter.ViewPoint), parameter.DepthOfField);
            var focusPoint = parameter.ViewPoint;

            if (focusPoint == default)
            {
                focusPoint = ViewPoint;
            }

            var doFocus = parameter.DepthOfFocus;
            var x0Focus = Math.Min(Math.Max(focusPoint.X, doField), ViewColumns - doField - 1);
            var y0Focus = Math.Min(Math.Max(focusPoint.Y, doField), ViewRows - doField - 1);

            // number of views integrated: (2 * doField + 1)^2
            var nLightFields       = MathCV.Pow2(doField + doField + 1);
            var invNormLightFields = 1 / (float)nLightFields;

            var img = DoGetUnmanImage(x0Focus, y0Focus);

            if (img == null)
            {
                return(null);
            }

            var format = img.PixelFormat;

            var h  = img.Height;
            var s  = img.Stride;
            var n  = s * h;
            var dt = (byte *)img.ImageData;

            Preferences.Supported.CheckFormat(format);

            var dxFocus = doFocus;
            var dyFocus = parameter.InvertFocus ? -doFocus : doFocus;

            var sumData = new float[img.Stride * h];

            for (var i = 0; i < sumData.Length; i++)
            {
                sumData[i] = dt[i] * invNormLightFields;
            }

            var result = UnmanagedImage.Create(img.Width, img.Height, format);

            dt = (byte *)result.ImageData;

            var xMax = x0Focus + doField;
            var yMin = y0Focus - doField;
            var yMax = y0Focus + doField;

            for (var x = x0Focus - doField; x <= xMax; x++)
            {
                for (var y = yMin; y <= yMax; y++)
                {
                    if (x == x0Focus &&
                        y == y0Focus)
                    {
                        continue;
                    }

                    var view = DoGetUnmanImage(x, y);
                    if (view == null)
                    {
                        return(null);
                    }

                    if (view.PixelFormat != format)
                    {
                        throw new BadImageFormatException(nameof(view));
                    }

                    try {
                        IntegrateImage(
                            view,
                            sumData,
                            dxFocus * (x - x0Focus),
                            dyFocus * (y - y0Focus),
                            invNormLightFields
                            );
                    } catch (OperationCanceledException) {
                        return(null);
                    }
                }
            }

            for (var i = 0; i < n; i++)
            {
                dt[i] = sumData[i].ClampToByte();
            }

            return(result);
        }
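A hedged usage sketch for GetUnmanagedImage; the lightField instance, the ViewPoint type, and the parameter values are assumptions inferred from how the method body reads them:

        // hedged usage sketch (instance, types and values are assumptions)
        var param = new LightfieldParam();
        param.ViewPoint    = new Point(4, 4);  // view to refocus around; default falls back to ViewPoint
        param.DepthOfField = 2;                // integrate a (2 * 2 + 1)^2 = 25-view neighborhood
        param.DepthOfFocus = 1;                // per-view shift, i.e. the synthetic focus plane
        param.InvertFocus  = false;

        UnmanagedImage refocused = lightField.GetUnmanagedImage(param);
        if (refocused != null)  // null: no views, a missing view, or cancelled processing
        {
            // ... consume the synthesized image ...
            refocused.Dispose();
        }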
Example 30
        /// <summary>
        /// Process new video frame.
        /// </summary>
        ///
        /// <param name="videoFrame">Video frame to process (detect motion in).</param>
        ///
        /// <remarks><para>Processes new frame from video source and detects motion in it.</para>
        ///
        /// <para>Check <see cref="MotionLevel"/> property to get information about amount of motion
        /// (changes) in the processed frame.</para>
        /// </remarks>
        ///
        public unsafe void ProcessFrame(UnmanagedImage videoFrame)
        {
            lock ( sync )
            {
                // check background frame
                if (backgroundFrame == null)
                {
                    lastTimeMeasurment = DateTime.Now;

                    // save image dimension
                    width  = videoFrame.Width;
                    height = videoFrame.Height;

                    // allocate memory for background and motion frames
                    backgroundFrame = UnmanagedImage.Create(width, height, videoFrame.PixelFormat);
                    motionFrame     = UnmanagedImage.Create(width, height, PixelFormat.Format8bppIndexed);

                    frameSize  = videoFrame.Stride * height;
                    motionSize = motionFrame.Stride * motionFrame.Height;

                    // temporary buffer
                    if (suppressNoise)
                    {
                        tempFrame = UnmanagedImage.Create(width, height, PixelFormat.Format8bppIndexed);
                    }

                    // set the background frame
                    videoFrame.Copy(backgroundFrame);

                    return;
                }

                // check image dimension
                if ((videoFrame.Width != width) || (videoFrame.Height != height))
                {
                    return;
                }

                // pointers to background and current frames
                byte *backFrame;
                byte *currFrame;
                int   diff;

                // update background frame
                if (millisecondsPerBackgroundUpdate == 0)
                {
                    // update background frame using frame counter as a base
                    if (++framesCounter == framesPerBackgroundUpdate)
                    {
                        framesCounter = 0;

                        backFrame = (byte *)backgroundFrame.ImageData.ToPointer( );
                        currFrame = (byte *)videoFrame.ImageData.ToPointer( );

                        for (int i = 0; i < frameSize; i++, backFrame++, currFrame++)
                        {
                            diff = *currFrame - *backFrame;
                            if (diff > 0)
                            {
                                (*backFrame)++;
                            }
                            else if (diff < 0)
                            {
                                (*backFrame)--;
                            }
                        }
                    }
                }
                else
                {
                    // update background frame using timer as a base

                    // get current time and calculate difference
                    DateTime currentTime = DateTime.Now;
                    TimeSpan timeDiff    = currentTime - lastTimeMeasurment;
                    // save current time as the last measurement
                    lastTimeMeasurment = currentTime;

                    int milliseconds = (int)timeDiff.TotalMilliseconds + millisecondsLeftUnprocessed;

                    // save the remainder so it can be taken into account next time
                    millisecondsLeftUnprocessed = milliseconds % millisecondsPerBackgroundUpdate;
                    // get the amount for the background update
                    int updateAmount = milliseconds / millisecondsPerBackgroundUpdate;

                    backFrame = (byte *)backgroundFrame.ImageData.ToPointer( );
                    currFrame = (byte *)videoFrame.ImageData.ToPointer( );

                    for (int i = 0; i < frameSize; i++, backFrame++, currFrame++)
                    {
                        diff = *currFrame - *backFrame;
                        if (diff > 0)
                        {
                            (*backFrame) += (byte)((diff < updateAmount) ? diff :  updateAmount);
                        }
                        else if (diff < 0)
                        {
                            (*backFrame) += (byte)((-diff < updateAmount) ? diff : -updateAmount);
                        }
                    }
                }

                backFrame = (byte *)backgroundFrame.ImageData.ToPointer( );
                currFrame = (byte *)videoFrame.ImageData.ToPointer( );
                byte *motion = (byte *)motionFrame.ImageData.ToPointer( );
                byte *currFrameLocal;
                byte *backFrameLocal;
                byte *motionLocal;
                int   bytesPerPixel = Tools.BytesPerPixel(videoFrame.PixelFormat);

                // 1 - get difference between frames (accumulated on every channel)
                // 2 - threshold the difference
                for (int i = 0; i < height; i++)
                {
                    currFrameLocal = currFrame;
                    backFrameLocal = backFrame;
                    motionLocal    = motion;
                    for (int j = 0; j < width; j++)
                    {
                        diff = 0;
                        for (int nbBytes = 0; nbBytes < bytesPerPixel; nbBytes++)
                        {
                            // difference
                            diff += Math.Abs((int)*currFrameLocal - (int)*backFrameLocal);
                            currFrameLocal++;
                            backFrameLocal++;
                        }
                        diff /= bytesPerPixel;
                        *motionLocal = (diff >= differenceThreshold) ? (byte)255 : (byte)0;
                        motionLocal++;
                    }
                    currFrame += videoFrame.Stride;
                    backFrame += backgroundFrame.Stride;
                    motion    += motionFrame.Stride;
                }

                if (suppressNoise)
                {
                    // suppress noise and calculate motion amount
                    AForge.SystemTools.CopyUnmanagedMemory(tempFrame.ImageData, motionFrame.ImageData, motionSize);
                    erosionFilter.Apply(tempFrame, motionFrame);

                    if (keepObjectEdges)
                    {
                        AForge.SystemTools.CopyUnmanagedMemory(tempFrame.ImageData, motionFrame.ImageData, motionSize);
                        dilatationFilter.Apply(tempFrame, motionFrame);
                    }
                }

                // calculate amount of motion pixels
                pixelsChanged = 0;
                motion        = (byte *)motionFrame.ImageData.ToPointer( );

                for (int i = 0; i < motionSize; i++, motion++)
                {
                    pixelsChanged += (*motion & 1);
                }
            }
        }
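The inner loop above accumulates the absolute difference over all channels of a pixel and then averages it; a worked example with illustrative BGR values:

        // worked example of the per-channel averaging above (values are illustrative)
        // background pixel (B,G,R) = (10, 20, 30), current pixel = (40, 20, 25)
        int diff = Math.Abs(40 - 10) + Math.Abs(20 - 20) + Math.Abs(25 - 30);  // 35
        diff /= 3;                                               // bytesPerPixel = 3 -> 11
        byte motionPixel = (diff >= 15) ? (byte)255 : (byte)0;   // 0 for a threshold of 15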