Exemplo n.º 1
0
        public Morphology()
        {
            // Demonstrates the basic morphological operators (dilate, erode,
            // opening, closing, gradient, top-hat, black-hat) on the Lenna
            // sample image and shows each result in its own window.
            using (IplImage srcImg = new IplImage(FilePath.Image.Lenna, LoadMode.AnyDepth | LoadMode.AnyColor))
            using (IplImage dstImgDilate = srcImg.Clone())
            using (IplImage dstImgErode = srcImg.Clone())
            using (IplImage dstImgOpening = srcImg.Clone())
            using (IplImage dstImgClosing = srcImg.Clone())
            using (IplImage dstImgGradient = srcImg.Clone())
            using (IplImage dstImgTophat = srcImg.Clone())
            using (IplImage dstImgBlackhat = srcImg.Clone())
            using (IplImage tmpImg = srcImg.Clone())
            // FIX: IplConvKernel wraps a native cvCreateStructuringElementEx
            // allocation; the original never released it (native memory leak).
            using (IplConvKernel element = Cv.CreateStructuringElementEx(9, 9, 4, 4, ElementShape.Rect, null))
            {
                Cv.Dilate(srcImg, dstImgDilate, element, 1);
                Cv.Erode(srcImg, dstImgErode, element, 1);
                // The composite operations require a scratch buffer (tmpImg).
                Cv.MorphologyEx(srcImg, dstImgOpening, tmpImg, element, MorphologyOperation.Open, 1);
                Cv.MorphologyEx(srcImg, dstImgClosing, tmpImg, element, MorphologyOperation.Close, 1);
                Cv.MorphologyEx(srcImg, dstImgGradient, tmpImg, element, MorphologyOperation.Gradient, 1);
                Cv.MorphologyEx(srcImg, dstImgTophat, tmpImg, element, MorphologyOperation.TopHat, 1);
                Cv.MorphologyEx(srcImg, dstImgBlackhat, tmpImg, element, MorphologyOperation.BlackHat, 1);

                // Show all results; windows close when the using scope ends.
                using (new CvWindow("src", srcImg))
                using (new CvWindow("dilate", dstImgDilate))
                using (new CvWindow("erode", dstImgErode))
                using (new CvWindow("opening", dstImgOpening))
                using (new CvWindow("closing", dstImgClosing))
                using (new CvWindow("gradient", dstImgGradient))
                using (new CvWindow("tophat", dstImgTophat))
                using (new CvWindow("blackhat", dstImgBlackhat))
                {
                    Cv.WaitKey(0);   // block until any key is pressed
                }
            }
        }
Exemplo n.º 2
0
 // P/Invoke binding for OpenCV's native cvMorphologyEx: applies an advanced
 // morphological operation (open/close/gradient/top-hat/black-hat) to src,
 // writing the result into dst. The [DllImport] attribute is presumably just
 // above this declaration, outside the visible excerpt.
 internal static extern void cvMorphologyEx(
     Arr src,                           // source image
     Arr dst,                           // destination image (same size/type as src)
     Arr temp,                          // scratch buffer (required by gradient/top-hat/black-hat per OpenCV docs)
     IplConvKernel element,             // structuring element defining the neighborhood
     MorphologicalOperation operation,  // which morphological operation to apply
     int iterations);                   // number of times erosion/dilation is applied
Exemplo n.º 3
0
        /// <summary>
        /// Binarizes <paramref name="src"/> (threshold 50) and erodes the
        /// result 10 times with a 3x3 rectangular kernel.
        /// </summary>
        /// <param name="src">Input image.</param>
        /// <returns>The eroded image (also stored in the <c>morp</c> field).</returns>
        public IplImage ErodeImage(IplImage src)
        {
            morp = new IplImage(src.Size, BitDepth.U8, 3);
            // NOTE(review): Cv.Erode requires src/dst to have matching channel
            // counts; this assumes Binary() returns a 3-channel image — TODO confirm.
            bin  = this.Binary(src, 50);

            // FIX: dispose the structuring element (native allocation) — the
            // original leaked it on every call.
            using (IplConvKernel element = new IplConvKernel(3, 3, 1, 1, ElementShape.Rect))
            {
                Cv.Erode(bin, morp, element, 10);
            }
            return(morp);
        }
Exemplo n.º 4
0
        /// <summary>
        /// Applies a morphological gradient (dilation minus erosion, 10
        /// iterations, 3x3 rectangular kernel) to <paramref name="src"/>.
        /// </summary>
        /// <param name="src">Input image.</param>
        /// <returns>The gradient image (also stored in the <c>morp</c> field).</returns>
        public IplImage MorphologyImage(IplImage src)
        {
            morp = new IplImage(src.Size, BitDepth.U8, 3);
            bin  = this.Binary(src, 50);

            // FIX: dispose the structuring element (native allocation) — the
            // original leaked it on every call.
            using (IplConvKernel element = new IplConvKernel(3, 3, 1, 1, ElementShape.Rect))
            {
                // NOTE(review): `bin` is used as MorphologyEx's scratch buffer,
                // which clobbers the freshly computed binary image, and its
                // channel count must match src's for this call to succeed —
                // looks suspicious; confirm against callers before changing.
                Cv.MorphologyEx(src, morp, bin, element, MorphologyOperation.Gradient, 10);
            }
            return(morp);
        }
Exemplo n.º 5
0
        /// <summary>
        /// Grabs the next camera frame, applies the currently selected effect
        /// (<c>ef</c>), optionally writes the frame to the recorder, and
        /// returns it as a <see cref="Bitmap"/>.
        /// </summary>
        public Bitmap GetFrame()
        {
            // cvQueryFrame returns an internal capture buffer — do not dispose src.
            src = cap.QueryFrame();
            IplImage tmp = null;

            switch (ef)
            {
            case Efect.Normal:
                tmp = src;                // pass-through: tmp aliases src
                break;

            case Efect.Gray:
                tmp = new IplImage(src.Size, BitDepth.U8, 1);
                Cv.CvtColor(src, tmp, ColorConversion.BgrToGray);
                break;

            case Efect.Canny:
                tmp = new IplImage(src.Size, BitDepth.U8, 1);
                Cv.CvtColor(src, tmp, ColorConversion.BgrToGray);
                Cv.Smooth(tmp, tmp);
                Cv.Canny(tmp, tmp, 200, 10);
                break;

            case Efect.Erode:
                tmp = Cv.Clone(src);
                // FIX: dispose the structuring element (the original leaked a
                // native allocation on every eroded frame).
                using (IplConvKernel element = Cv.CreateStructuringElementEx(3, 3, 2, 2, ElementShape.Rect, null))
                {
                    Cv.Erode(src, tmp, element, 2);
                }
                break;
            }

            if (Recording)
            {
                if (tmp.ElemChannels == 1)
                {
                    // The recorder needs a BGR frame; tmp is plain grayscale here
                    // (produced by BgrToGray / Canny above).
                    // BUG FIX: the original used ColorConversion.BayerGrToBgr,
                    // which demosaics a Bayer pattern — wrong for grayscale data.
                    // FIX: dispose the temporary BGR clone after writing.
                    using (IplImage processed = Cv.Clone(src))
                    {
                        Cv.CvtColor(tmp, processed, ColorConversion.GrayToBgr);
                        rec.WriteFrame(processed);
                    }
                }
                else
                {
                    rec.WriteFrame(tmp);
                }
            }

            Bitmap frame = tmp.ToBitmap();
            // FIX: the original leaked every per-frame intermediate image.
            // Only dispose tmp when it is a fresh image, not the src alias.
            if (!ReferenceEquals(tmp, src))
            {
                tmp.Dispose();
            }
            return(frame);
        }
Exemplo n.º 6
0
 /// <summary>
 /// Runs a closing morphology operation (9x9 rectangular structuring
 /// element, one iteration) over a copy of the input image.
 /// </summary>
 /// <param name="src_tmp">Image to process (not modified).</param>
 /// <returns>A new image holding the closed result (also kept in <c>morph</c>).</returns>
 public IplImage CloseMorphology(IplImage src_tmp)
 {
     // Work entirely on clones so the caller's image stays untouched.
     using (IplImage source = src_tmp.Clone())
     using (IplImage result = source.Clone())
     using (IplImage scratch = source.Clone())
     {
         // Build the 9x9 rectangular structuring element (anchor at 4,4).
         IplConvKernel element = Cv.CreateStructuringElementEx(9, 9, 4, 4, ElementShape.Rect, null);
         // Closing = dilation followed by erosion; scratch is the temp buffer.
         Cv.MorphologyEx(source, result, scratch, element, MorphologyOperation.Close, 1);
         // Clone the result out of the using scope before it is disposed.
         morph = result.Clone();
     }
     return(morph);
 }
Exemplo n.º 7
0
        //* Threshold (binarization) adjustment *//
        // Erodes or dilates the threshold form's source image by the amount set
        // on the "Bold" track bar, then re-shows the form. The "negative"
        // checkbox flips the operation from erode to dilate.
        private void button_Threshold_Click(object sender, EventArgs e)
        {
            // FIX: dispose the structuring element (native allocation) — the
            // original leaked it on every click.
            using (IplConvKernel element = new IplConvKernel(3, 3, 1, 1, ElementShape.Rect, null))
            {
                if (checkBox_isNegative.Checked)
                {
                    beforeErode.Dilate(thresholdForm.srcImage, element, trackBar_Bold.Value);
                }
                else
                {
                    beforeErode.Erode(thresholdForm.srcImage, element, trackBar_Bold.Value);
                }
            }

            thresholdForm.Show(this);
        }
Exemplo n.º 8
0
        public Morphology()
        {
            // cvMorphologyEx sample:
            // performs the various morphological operations with an explicit
            // structuring element and displays each result in its own window.

            // (1) Load the image and allocate one result image per operation.
            using (IplImage srcImg = new IplImage(Const.ImageLenna, LoadMode.AnyDepth | LoadMode.AnyColor))
            using (IplImage dstImgDilate = srcImg.Clone())
            using (IplImage dstImgErode = srcImg.Clone())
            using (IplImage dstImgOpening = srcImg.Clone())
            using (IplImage dstImgClosing = srcImg.Clone())
            using (IplImage dstImgGradient = srcImg.Clone())
            using (IplImage dstImgTophat = srcImg.Clone())
            using (IplImage dstImgBlackhat = srcImg.Clone())
            using (IplImage tmpImg = srcImg.Clone())
            // (2) Create the structuring element.
            // FIX: IplConvKernel wraps a native cvCreateStructuringElementEx
            // allocation; the original never released it (native memory leak).
            using (IplConvKernel element = Cv.CreateStructuringElementEx(9, 9, 4, 4, ElementShape.Rect, null))
            {
                // (3) Execute the various morphological operations; the
                // composite ones need tmpImg as a scratch buffer.
                Cv.Dilate(srcImg, dstImgDilate, element, 1);
                Cv.Erode(srcImg, dstImgErode, element, 1);
                Cv.MorphologyEx(srcImg, dstImgOpening, tmpImg, element, MorphologyOperation.Open, 1);
                Cv.MorphologyEx(srcImg, dstImgClosing, tmpImg, element, MorphologyOperation.Close, 1);
                Cv.MorphologyEx(srcImg, dstImgGradient, tmpImg, element, MorphologyOperation.Gradient, 1);
                Cv.MorphologyEx(srcImg, dstImgTophat, tmpImg, element, MorphologyOperation.TopHat, 1);
                Cv.MorphologyEx(srcImg, dstImgBlackhat, tmpImg, element, MorphologyOperation.BlackHat, 1);

                // (4) Display the results; windows close when the scope ends.
                using (new CvWindow("src", srcImg))
                using (new CvWindow("dilate", dstImgDilate))
                using (new CvWindow("erode", dstImgErode))
                using (new CvWindow("opening", dstImgOpening))
                using (new CvWindow("closing", dstImgClosing))
                using (new CvWindow("gradient", dstImgGradient))
                using (new CvWindow("tophat", dstImgTophat))
                using (new CvWindow("blackhat", dstImgBlackhat))
                {
                    Cv.WaitKey(0);   // block until any key is pressed
                }
            }
        }
Exemplo n.º 9
0
        /// <summary>
        /// Pre-processes the <c>src</c> field: converts to grayscale
        /// (stored in <c>timg</c>, with an untouched copy in <c>pimg</c>),
        /// smooths, top-hats with a wide 21x3 kernel, and Otsu-thresholds.
        /// The final binary image is left in <c>timg</c>.
        /// </summary>
        public void PreProcess()
        {
            // FIX: dispose the structuring element and the two scratch images —
            // the original leaked the element and released the images manually
            // (not exception-safe).
            using (IplConvKernel element = Cv.CreateStructuringElementEx(21, 3, 10, 2, ElementShape.Rect, null))
            {
                timg = new IplImage(src.Size, BitDepth.U8, 1);
                using (IplImage temp = timg.Clone())
                using (IplImage dest = timg.Clone())
                {
                    src.CvtColor(timg, ColorConversion.RgbaToGray);
                    pimg = timg.Clone();
                    //Cv.Threshold(pimg, pimg, 128, 255, ThresholdType.Binary | ThresholdType.Otsu);
                    Cv.Smooth(timg, timg, SmoothType.Gaussian);
                    // Wide flat kernel: top-hat emphasizes horizontal bright structures.
                    Cv.MorphologyEx(timg, dest, temp, element, MorphologyOperation.TopHat, 1);

                    Cv.Threshold(dest, timg, 128, 255, ThresholdType.Binary | ThresholdType.Otsu);
                    Cv.Smooth(timg, dest, SmoothType.Median);
                    // NOTE(review): this dilated result lives only in dest, which is
                    // disposed immediately after — the dilation appears to be
                    // discarded. Preserved as-is; confirm intent before removing.
                    Cv.Dilate(dest, dest, element, 2);
                }
            }
        }
Exemplo n.º 10
0
        /// <summary>
        /// Reactive operator: applies the configured morphological operation
        /// (Operation, Iterations) with a structuring element built from the
        /// Size/Anchor/Shape properties to every image in the sequence.
        /// The structuring element is rebuilt lazily whenever a property changes.
        /// </summary>
        /// <param name="source">Sequence of input images.</param>
        /// <returns>Sequence of processed output images.</returns>
        public override IObservable <IplImage> Process(IObservable <IplImage> source)
        {
            // Bridge the PropertyChanged event into an observable so property
            // edits can invalidate the cached structuring element.
            var propertyChanged = Observable.FromEventPattern <EventArgs>(
                handler => PropertyChanged += new EventHandler(handler),
                handler => PropertyChanged -= new EventHandler(handler));

            // Defer so each subscription gets its own strel/temp state.
            return(Observable.Defer(() =>
            {
                IplImage temp = null;              // scratch buffer reused across frames
                IplConvKernel strel = null;        // cached structuring element
                bool updateStrel = false;          // set when a property changed
                var update = propertyChanged.Subscribe(xs => updateStrel = true);
                return source.Select(input =>
                {
                    // Rebuild the structuring element on first use or after a
                    // property change, disposing the previous one.
                    if (strel == null || updateStrel)
                    {
                        var size = Size;
                        var anchor = Anchor;
                        updateStrel = false;
                        if (strel != null)
                        {
                            strel.Close();
                        }
                        // Negative anchor coordinates mean "center of the kernel".
                        strel = new IplConvKernel(
                            size.Width,
                            size.Height,
                            anchor.X < 0 ? size.Width / 2 : anchor.X,
                            anchor.Y < 0 ? size.Height / 2 : anchor.Y,
                            Shape);
                    }

                    // Fresh output per frame; temp is resized/reallocated only
                    // when the input format changes.
                    var output = new IplImage(input.Size, input.Depth, input.Channels);
                    temp = IplImageHelper.EnsureImageFormat(temp, input.Size, input.Depth, input.Channels);
                    CV.MorphologyEx(input, output, temp, strel, Operation, Iterations);
                    return output;
                }).Finally(update.Dispose);   // unhook the property subscription on teardown
            }));
        }
Exemplo n.º 11
0
        /// <summary>
        /// Edge-based blob extraction: Canny edges from the grayscale input,
        /// black-hat morphology with a wide flat kernel, Otsu threshold, then
        /// connected-component labeling filtered by blob area (1550–4850 px).
        /// Bounding boxes and angles are rendered into a clone of
        /// <paramref name="mainSubImage"/> (the clone is discarded).
        /// </summary>
        /// <param name="mainSubImage">Color image used for label-image sizing and rendering.</param>
        /// <param name="imgGray">Grayscale image to detect edges in (not modified).</param>
        /// <returns>The filtered set of blobs.</returns>
        private CvBlobs PreProcessImage1(IplImage mainSubImage, IplImage imgGray)
        {
            CvBlobs  blobs    = null;
            IplImage tmpImage = null;
            IplImage gray     = null;
            IplImage tgray    = null;
            IplImage labelImg = null;
            IplImage temp     = null;

            try
            {
                tgray = imgGray.Clone();
                gray  = new IplImage(tgray.Size, tgray.Depth, 1);
                Cv.Smooth(tgray, tgray, SmoothType.Gaussian);
                Cv.Canny(tgray, gray, 500, 2, ApertureSize.Size5);
                temp = gray.Clone();
                // Wide 7x1 kernel: black-hat picks out thin horizontal dark gaps.
                // NOTE(review): the element created here is never released — native leak.
                //IplConvKernel element = Cv.CreateStructuringElementEx(5, 1, 3, 0, ElementShape.Rect, null);
                IplConvKernel element = Cv.CreateStructuringElementEx(7, 1, 3, 0, ElementShape.Rect, null);
                // In-place morphology on gray, using temp as the scratch buffer.
                Cv.MorphologyEx(gray, gray, temp, element, MorphologyOperation.BlackHat, 1);
                Cv.Threshold(gray, gray, 100, 255, ThresholdType.Binary | ThresholdType.Otsu);
                Cv.Smooth(gray, gray, SmoothType.Gaussian);


                labelImg = new IplImage(mainSubImage.Size, CvBlobLib.DepthLabel, 1);
                blobs    = new CvBlobs();
                CvBlobLib.Label(gray, labelImg, blobs);
                // Keep only blobs whose area falls within [1550, 4850] pixels.
                CvBlobLib.FilterByArea(blobs, 1550, 4850);

                tmpImage = mainSubImage.Clone();
                //CvTracks tracks = new CvTracks();
                //CvBlobLib.UpdateTracks(blobs, tracks, 200.0, 5);
                //CvBlobLib.RenderTracks(tracks, tmpImage, tmpImage, RenderTracksMode.ID);
                // Rendering goes into the clone only; presumably for debugging —
                // the clone is released in the finally block below.
                blobs.RenderBlobs(labelImg, mainSubImage, tmpImage, RenderBlobsMode.BoundingBox | RenderBlobsMode.Angle);

                /*
                 * img.SetROI(subImageRect);
                 * Cv.Copy(tmpImage, img);
                 * img.ResetROI();
                 * Cv.ReleaseImage(tmpImage);
                 *
                 */
            }
            finally
            {
                // Release every intermediate image regardless of how we exit.
                if (null != temp)
                {
                    Cv.ReleaseImage(temp);
                }

                if (null != tgray)
                {
                    Cv.ReleaseImage(tgray);
                }

                if (null != gray)
                {
                    Cv.ReleaseImage(gray);
                }

                if (null != labelImg)
                {
                    Cv.ReleaseImage(labelImg);
                }

                if (null != tmpImage)
                {
                    Cv.ReleaseImage(tmpImage);
                }
            }

            return(blobs);
        }
Exemplo n.º 12
0
        /// <summary>
        /// Legacy ("_old") blob extraction: grayscale conversion, masking off
        /// image borders with white rectangles, top-hat with a very wide flat
        /// kernel (180x5), Otsu threshold, median smoothing, then
        /// connected-component labeling filtered by area (850–4850 px).
        /// </summary>
        /// <param name="img">Color input image (not modified).</param>
        /// <returns>The filtered set of blobs.</returns>
        private CvBlobs PreProcessImage2_old(IplImage img)
        {
            CvBlobs       blobs     = null;
            IplConvKernel element   = null;
            IplImage      temp      = null;
            IplImage      dest      = null;
            IplImage      tmpImage  = null;
            IplImage      tmpImage2 = null;
            IplImage      labelImg  = null;

            try
            {
                // 180x5 flat kernel: top-hat emphasizes wide, short bright bands.
                // NOTE(review): element is never released (no ReleaseStructuringElement
                // in the finally block) — native leak in this legacy path.
                element  = Cv.CreateStructuringElementEx(180, 5, 90, 1, ElementShape.Rect, null);
                tmpImage = new IplImage(img.Size, BitDepth.U8, 1);
                temp     = tmpImage.Clone();
                dest     = tmpImage.Clone();
                img.CvtColor(tmpImage, ColorConversion.RgbaToGray);
                // Paint white rectangles over the top ~1/3, bottom ~1/5 and right
                // ~2/9 of the frame so labeling only sees the region of interest.
                tmpImage.Rectangle(new CvPoint(0, 0), new CvPoint((Int32)(tmpImage.Size.Width), (Int32)((tmpImage.Size.Height / 9) * 3)), new CvScalar(255, 255, 255), -1);
                tmpImage.Rectangle(new CvPoint(0, (Int32)((tmpImage.Size.Height / 5) * 4)), new CvPoint((Int32)(tmpImage.Size.Width), (Int32)(tmpImage.Size.Height)), new CvScalar(255, 255, 255), -1);
                tmpImage.Rectangle(new CvPoint((Int32)((tmpImage.Size.Width / 9) * 7), 0), new CvPoint((Int32)((tmpImage.Size.Width)), (Int32)(tmpImage.Size.Height)), new CvScalar(255, 255, 255), -1);
                Cv.Smooth(tmpImage, tmpImage, SmoothType.Gaussian);
                Cv.MorphologyEx(tmpImage, dest, temp, element, MorphologyOperation.TopHat, 1);
                Cv.Threshold(dest, tmpImage, 128, 255, ThresholdType.Binary | ThresholdType.Otsu);
                Cv.Smooth(tmpImage, dest, SmoothType.Median);


                labelImg  = new IplImage(img.Size, CvBlobLib.DepthLabel, 1);
                blobs     = new CvBlobs();
                // NOTE(review): labeling runs on a clone of the thresholded image
                // (tmpImage), not on the median-smoothed dest — confirm intended.
                tmpImage2 = tmpImage.Clone();
                CvBlobLib.Label(tmpImage2, labelImg, blobs);

                //Cv.ReleaseImage(tmpImage);
                //tmpImage = img.Clone();
                //blobs.RenderBlobs(labelImg, img, tmpImage);
                //tmpImage.SaveImage(@"c:\temp\newImages\RenderBlobsNOFiltered.png");


                // Keep only blobs whose area falls within [850, 4850] pixels.
                CvBlobLib.FilterByArea(blobs, 850, 4850);
                // Reuse tmpImage as a color canvas for debug rendering.
                Cv.ReleaseImage(tmpImage);
                tmpImage = img.Clone();
                //CvTracks tracks = new CvTracks();
                //CvBlobLib.UpdateTracks(blobs, tracks, 200.0, 5);
                //CvBlobLib.RenderTracks(tracks, tmpImage, tmpImage, RenderTracksMode.ID);
                blobs.RenderBlobs(labelImg, img, tmpImage, RenderBlobsMode.BoundingBox | RenderBlobsMode.Angle);
                //tmpImage.SaveImage(@"c:\temp\newImages\RenderBlobsFiltered.png");
            }
            finally
            {
                // Release every intermediate image regardless of how we exit.
                if (null != temp)
                {
                    Cv.ReleaseImage(temp);
                }

                if (null != dest)
                {
                    Cv.ReleaseImage(dest);
                }

                if (null != tmpImage)
                {
                    Cv.ReleaseImage(tmpImage);
                }

                if (null != tmpImage2)
                {
                    Cv.ReleaseImage(tmpImage2);
                }

                if (null != labelImg)
                {
                    Cv.ReleaseImage(labelImg);
                }
            }


            return(blobs);
        }
Exemplo n.º 13
0
        /// <summary>
        /// Segmentation pass: repeatedly drops a square "ball" of side
        /// (1 + 2*radius) onto uncolored pixels, flood-fills a new randomly
        /// colored segment from the ball's border, then cleans the segment up
        /// with an erode/dilate (opening) using an elliptical kernel.
        /// Successfully colored pixels are removed from both uncolored sets
        /// and written into labelMap with a fresh segment id.
        /// </summary>
        /// <param name="orgUncoloredPixels">Master set of still-uncolored pixel indices; shrinks as segments are created.</param>
        /// <param name="labelMap">Per-pixel segment ids, written for every colored pixel.</param>
        /// <param name="radius">Ball radius; ball side is 1 + 2*radius.</param>
        /// <param name="edgeIter">Pixel iterator over the edge image.</param>
        /// <param name="orgIter">Pixel iterator over the original image (passed to FloodFill).</param>
        /// <param name="ballSizeList">Per-pixel precomputed maximum ball size that fits at that pixel.</param>
        /// <returns>One RGBA image per segment created in this pass.</returns>
        unsafe List <IplImage> MoveBall(HashSet <int> orgUncoloredPixels, int[] labelMap, int radius, BitmapIterator edgeIter, BitmapIterator orgIter, int[] ballSizeList)
        {
            System.Diagnostics.Stopwatch sw  = System.Diagnostics.Stopwatch.StartNew();
            System.Diagnostics.Stopwatch sw2 = new Stopwatch();

            List <IplImage> segments           = new List <IplImage>();
            byte *          edgeData           = (byte *)edgeIter.PixelData;
            int             ballSize           = 1 + 2 * radius;
            // Work on a private copy; the queue fixes iteration order while the
            // set provides O(1) membership tests and removal.
            HashSet <int>   uncoloredPixels    = new HashSet <int>(orgUncoloredPixels);
            Queue <int>     uncoloredPixelList = new Queue <int>(orgUncoloredPixels);

            while (uncoloredPixels.Count >= 1)
            {
                int idx = uncoloredPixelList.Dequeue();
                // Skip pixels already colored by an earlier segment in this pass.
                if (!uncoloredPixels.Contains(idx))
                {
                    continue;
                }
                uncoloredPixels.Remove(idx);

                // Linear index -> (x, y); note DivRem yields y as quotient, x as remainder.
                int x, y;
                y = Math.DivRem(idx, edgeImage.Width, out x);

                // Does the ball fit at this pixel?
                if (ballSizeList[idx] < ballSize)
                {
                    continue;
                }

                // Skip if any pixel under the ball is already colored.
                bool colored = false;
                for (int yy = y; yy < y + ballSize; yy++)
                {
                    for (int xx = x; xx < x + ballSize; xx++)
                    {
                        if (!(y == yy && x == xx) && !uncoloredPixels.Contains(xx + yy * edgeImage.Width))
                        {
                            colored = true;
                            goto COLORED_DECIDED;   // break out of both loops
                        }
                    }
                }

COLORED_DECIDED:

                if (colored)
                {
                    continue;
                }

                // Start a new segment: transparent RGBA canvas.
                IplImage segmentImage = new IplImage(edgeImage.Width, edgeImage.Height, BitDepth.U8, 4);
                Cv.Set(segmentImage, new CvScalar(0, 0, 0, 0));

                // NOTE(review): a fresh Random per iteration can repeat seeds on
                // fast loops; the color is only cosmetic here, so preserved as-is.
                Random rand = new Random();
                Color  c    = Color.FromArgb(rand.Next(255) + 1, rand.Next(255) + 1, rand.Next(255) + 1);

                // Draw the ball's outline in the segment color.
                Cv.Rectangle(segmentImage, x + 1, y + 1, x + ballSize - 1, y + ballSize - 1, Cv.RGB(c.R, c.G, c.B));

                // Flood fill outward from the ball's border pixels.
                HashSet <int> initPoints = new HashSet <int>();
                for (int yy = y; yy < y + ballSize - 1; yy++)
                {
                    initPoints.Add(x + yy * edgeImage.Width);
                    initPoints.Add((x + ballSize - 1) + yy * edgeImage.Width);
                }
                for (int xx = x; xx < x + ballSize - 1; xx++)
                {
                    initPoints.Add(xx + y * edgeImage.Width);
                    initPoints.Add(xx + (y + ballSize - 1) * edgeImage.Width);
                }

                CvRect roi = FloodFill(initPoints, segmentImage, c, uncoloredPixels, orgIter, sw2);



                sw2.Start();

                // Morphological opening (erode then dilate) restricted to the
                // filled region to smooth ragged segment borders.
                Cv.SetImageROI(segmentImage, roi);
                {
                    IplConvKernel kernel = new IplConvKernel(2 * radius + 1, 2 * radius + 1, radius, radius, ElementShape.Ellipse);
                    Cv.Erode(segmentImage, segmentImage, kernel, 1);
                    Cv.Dilate(segmentImage, segmentImage, kernel, 1);
                }
                Cv.ResetImageROI(segmentImage);


                // Commit: every pixel with nonzero alpha belongs to this segment.
                for (int yy = roi.Y; yy < roi.Bottom; yy++)
                {
                    for (int xx = roi.X; xx < roi.Right; xx++)
                    {
                        if (segmentImage.ImageDataPtr[4 * xx + yy * segmentImage.WidthStep + 3] != 0)
                        {
                            int idx2 = xx + yy * edgeImage.Width;
                            labelMap[idx2] = segmentCnt;
                            uncoloredPixels.Remove(idx2);
                            orgUncoloredPixels.Remove(idx2);
                        }
                    }
                }
                segmentCnt++;

                segments.Add(segmentImage);

                sw2.Stop();
            }
            //if (segments.Count >= 1) segments.First().Save(radius + ".png");

            //            if (sw.ElapsedMilliseconds >= 1000)
            //          {
            //            Console.WriteLine("[radius=" + radius + "]");
            //          Console.WriteLine("total= " + sw.ElapsedMilliseconds + " ms");
            //        Console.WriteLine("total= " + sw2.ElapsedMilliseconds + " ms");
            //  }
            return(segments);
        }
Exemplo n.º 14
0
        // affect a CvMat in an unspeakable way (maybe this: http://ieeexplore.ieee.org/document/4062288/?reload=true)
        // FIX : looks like there's lots of room for optimization
        // NOTE : It doesn't just look like it, IT'S P.A.N.A.R.G.O.
        // <= returns 32bit float greyscale
        static public CvMat IBO(CvMat image)
        {
            int   imageRows = image.Rows;
            int   imageCols = image.Cols;
            CvMat IBOsub    = new CvMat(imageRows, imageCols, MatrixType.F32C1, new CvScalar(0));

            const int kernelCols = 3;
            const int kernelRows = 3;

            int  x, y, k, l;
            bool firstElement;
            int  a1, b1, a2, b2;

            a1 = (kernelCols - 1) / 2;
            b1 = a1 + 1;
            a2 = (kernelRows - 1) / 2;
            b2 = a2 + 1;

            //convert g(x,y) = Z = Ð(f(x,y))  for 8 values surrounding the x,y value...
            for (x = a1; x < imageCols - a1; x++)
            {
                for (y = a1; y < imageRows - a1; y++)
                {
                    firstElement = true;

                    for (k = x - a1; k < x + b1; k++)
                    {
                        for (l = y - a2; l < y + b2; l++)
                        {
                            double val = image.GetReal2D(l, k);
                            if (firstElement)
                            {
                                IBOsub.SetReal2D(y, x, val);                                  // * image.at<float>(l,k);
                                firstElement = false;
                            }
                            else
                            {
                                IBOsub.SetReal2D(y, x, IBOsub.GetReal2D(y, x) * val);                                  // originally there was multiplication not addition
                                // TODO : I changed back to multiplication, because addition gave back white image. Great?
                            }
                        }
                    }
                }
            }

            // TODO : I subtracted this addition because it seemed too much. Good?
            ////////this is an addition by us
#if (false)
            {
                for (x = a1; x < imageCols - a1; x++)
                {
                    for (y = a1; y < imageRows - a1; y++)
                    {
                        double sqr = IBOsub.GetReal2D(y, x);
                        sqr *= sqr;
                        IBOsub.SetReal2D(y, x, sqr);
                    }
                }
            }
#endif
            for (x = 0; x < imageCols; x++)
            {
                for (k = 0; k < a1; k++)
                {
                    IBOsub.SetReal2D(k, x, IBOsub.GetReal2D(a1, x));
                    IBOsub.SetReal2D(imageRows - (k + 1), x, IBOsub.GetReal2D(imageRows - (a1 + 1), x));
                }
            }

            for (y = 0; y < imageRows; y++)
            {
                for (k = 0; k < a2; k++)
                {
                    IBOsub.SetReal2D(y, k, IBOsub.GetReal2D(y, a2));
                    IBOsub.SetReal2D(y, imageCols - (k + 1), IBOsub.GetReal2D(y, imageCols - (a2 + 1)));
                }
            }

            // find the max value of the mat
            double  minVal, maxVal;
            CvPoint minLoc, maxLoc;
            Cv.MinMaxLoc(IBOsub, out minVal, out maxVal, out minLoc, out maxLoc);
            image = (IBOsub * (1.0 / maxVal));                                        // by this function, image is now a new object

            IplConvKernel element = new IplConvKernel(3, 3, 1, 1, ElementShape.Rect); // simply returns a predefined shape-in-a-Mat
            Cv.Dilate(image, image, element);

            return(image);
        }
Exemplo n.º 15
0
 // P/Invoke binding for OpenCV's native cvDilate: dilates src into dst with the
 // given structuring element, applied `iterations` times. Per the OpenCV docs a
 // null element means the default 3x3 rectangular kernel. The [DllImport]
 // attribute is presumably just above this declaration, outside the visible excerpt.
 internal static extern void cvDilate(Arr src, Arr dst, IplConvKernel element, int iterations);
Exemplo n.º 16
0
        // => inputMat MUST be 24/32 bit
        //
        // Per-frame processing pipeline: converts the frame to grey, enhances
        // contrast, thresholds, cleans up with morphology, extracts contours and
        // Hough lines to locate corner points, tracks those corners across frames
        // (oldPt / final4P), and finally perspective-warps and alpha-blends the
        // `overlay` image onto the detected quad.
        // Returns the annotated frame (the same CvMat instance passed in).
        // Side effects: updates width, height, temp, inputQuad and outputQuad,
        // and (via kawane / the tracking section) thresholdDist, final4P, oldPt
        // and numFrames.
        private CvMat processFrame(CvMat inputMat)
        {
            // return "inputMat" after lots. LOTS. Of processing

            width  = inputMat.Cols;
            height = inputMat.Rows;

            // taking out 4% of the input's edges: sounds wrong
#if false
            // I have no idea what on earth is the purpose of this:
            //CvMat temp2 = inputMat( new CvRect( inputMat.Cols / 25, inputMat.Cols / 25, inputMat.Cols - 2 * (inputMat.Cols / 25), inputMat.Rows - 2 * (inputMat.Rows / 25) ) );
            //resize( temp2, temp2, inputMat.size() );
            //temp2.copyTo( inputMat );
            int    borderX = inputMat.Cols / 25;          // 4% of original
            int    borderY = inputMat.Rows / 25;
            CvRect roi     = new CvRect(borderX, borderY, inputMat.Cols - 2 * borderX, inputMat.Rows - 2 * borderY);
            CvMat  temp2   = inputMat.GetSubRect(out temp2, roi);            // stupid to pass "out temp2"?
            inputMat = temp2;
            // =TODO : What? temp2.Copy( inputMat );
            // is it really required to remove 4% of the input image's edges?
#endif

            // Convert the colour input to a single-channel grey image.
            CvMat inputMat_grey;
            {
                // TODO : looks like a waste to make two conversions from inputMat to _grey, instead of 1
                // since OpenCV doesn't support it, it could be made manually
                CvMat inputMat_grey8 = MatOps.ConvertChannels(inputMat);
                inputMat_grey = MatOps.ConvertElements(inputMat_grey8, MatrixType.F32C1, 1.0 / 255.0);
            }

            // NOTE : IBO seems to give good contrast with certain images, but with bbox7, it is just disastrous.
            //MatOps.NewWindowShow( inputMat_grey );
            //inputMat_grey = Filters.IBO( inputMat_grey ); // inputMat_grey = 32f
            //MatOps.NewWindowShow( inputMat_grey );
            inputMat_grey = MatOps.ConvertElements(inputMat_grey, MatrixType.U8C1, 255);               // inputMat_grey = 8u
            // was: SLOW : Filters.ContrastEnhancement( inputMat_grey ); // NOTE : not needed AFTER IBO
            // NOTE : Contrast Enhancement2 may NOT be needed AT ALL, at this point at least, ANYWAY!!!
            Filters.ContrastEnhancement2(inputMat_grey);               // NOTE : certainly NOT needed AFTER IBO
            MatOps.NewWindowShow(inputMat_grey);

            // mask passed originally in method below was all white, so I optimized it out. Passing the number of pixels was also dumb-o.
            // Automatic threshold selection, then binarize (inverted: features become white).
            double thresh = Filters.NeighborhoodValleyEmphasis(inputMat_grey);
            Cv.Threshold(inputMat_grey, inputMat_grey, thresh, 255, ThresholdType.BinaryInv);

            // Morphological opening (erode then dilate) with a 3x3 cross to remove speckle noise.
            IplConvKernel element = new IplConvKernel(3, 3, 1, 1, ElementShape.Cross);
            Cv.Erode(inputMat_grey, inputMat_grey, element);
            Cv.Dilate(inputMat_grey, inputMat_grey, element);
            MatOps.NewWindowShow(inputMat_grey);

            // TODO : check if check is required
            if (inputMat_grey.ElemType != MatrixType.U8C1)
            {
                inputMat_grey = MatOps.ConvertElements(inputMat_grey, MatrixType.U8C1, 255.0);
            }

            // =======
            // is this just a test?
            // Extract a coarse contour (min length 5) and draw it filled onto a fresh mat.
            CvPoint[] newPtV = Filters.DistillContours(inputMat_grey, 5, Const.PointZero);
            CvMat     imageDest;
            using (CvMemStorage storage = new CvMemStorage())
            {
                CvSeq <CvPoint> updateContours = CvSeq <CvPoint> .FromArray(newPtV, SeqType.Contour, storage);

                imageDest = new CvMat(inputMat.Rows, inputMat.Cols, MatrixType.U8C1);
                Cv.DrawContours(imageDest, updateContours, Const.ScalarWhite, 0, 100, 16);
            }
            // =======

            kawane(newPtV);               // updates thresholdDist, minMaskY, final4P

            //*******************************************set a greater contour for estimation of the missing points*******************************//

            // =======
            newPtV = Filters.DistillContours(inputMat_grey, 100, Const.PointZero);
            using (CvMemStorage storage = new CvMemStorage())
            {
                CvSeq <CvPoint> updateContours = CvSeq <CvPoint> .FromArray(newPtV, SeqType.Contour, storage);

                imageDest = new CvMat(inputMat.Rows, inputMat.Cols, MatrixType.U8C1);
                Cv.DrawContours(imageDest, updateContours, Const.ScalarWhite, 0, 100, 1, LineType.AntiAlias);
            }
            // =======

            // Build a filled convex-polygon mask from the larger contour.
            CvMat mask1 = new CvMat(inputMat.Rows, inputMat.Cols, MatrixType.U8C1, 0);
            Cv.FillConvexPoly(mask1, newPtV, Const.ScalarWhite, 0, 0);

            // Copy the grey frame through the mask, edge-detect, then dilate+erode
            // (closing) to bridge small gaps in the edges.
            temp = MatOps.ConvertChannels(inputMat);
            temp.Copy(imageDest, mask1);
            Cv.Canny(imageDest, imageDest, 150, 300, ApertureSize.Size3);
            IplConvKernel element2 = new IplConvKernel(3, 3, 1, 1, ElementShape.Rect);
            Cv.Dilate(imageDest, imageDest, element2);
            Cv.Erode(imageDest, imageDest, element2);

            CvLineSegmentPoint[] lines = Cv2.HoughLinesP(new Mat(imageDest), 1, Cv.PI / 180 /*NOTE : 1 degree angle*/, 50, 50, 50); // TODO : those 50s..?
            extendLines(lines, 350);                                                                                                // TODO : This idea sounds arbitary? And why 350? At least some percentage?

            // draw extended lines
            for (int i = 0; i < lines.Length; ++i)
            {
                CvLineSegmentPoint l = lines[i];
                Cv.Line(imageDest, l.P1, l.P2, Const.ScalarWhite, 1, LineType.AntiAlias);
            }

            Cv.Dilate(imageDest, imageDest, element2);               // TODO : FIX : Dilate again?!

            // another huge function here...
            fourPoints(lines);

            ////////////

            //********************************************************************* replace estimate points with mask corners ********//
            if (oldPt.Count != 0)
            {
                //**
                // BEWARE : great use of the English language following right below:
                // test for each and every one of the last slice delete each one of all the revisited of the above and estimate for only the best the off topic adapt
                //**
                // For each freshly-estimated corner (final4P[i]), record in positions[i]
                // the index of the closest previously-tracked point in oldPt.
                List <int> positions = new List <int>(final4P.Count);
                for (int i = 0; i < final4P.Count; ++i)
                {
                    positions.Add(-1);                       // "initialize" positions[i]
                    double distmin = 10000;
                    for (int j = 0; j < oldPt.Count; ++j)
                    {
                        double distAB = PointOps.Norm(oldPt[j] - final4P[i]);
                        if (distAB < distmin)
                        {
                            distmin      = distAB;
                            positions[i] = j;
                        }
                    }
                }
                int flagFrCounter = 0;
                for (int i = 0; i < final4P.Count; ++i)
                {
                    // NOTE(review): positions[i] can remain -1 if every oldPt is at
                    // distance >= 10000 from final4P[i]; the indexing below would then
                    // throw. Presumably distances never get that large — confirm.
                    double distA = PointOps.Norm(oldPt[positions[i]] - final4P[i]);
                    // threshold defining the maximum search region; otherwise keep the previous point
                    // (was Greek: "threshold pou na orizei tin megisti perioxi gia anazitisi, alliws na krataei to proigoumeno simeio")

                    if (distA < thresholdDist)                     //if(distA<80)
                    {
                        oldPt[positions[i]] = final4P[i];
                        --flagFrCounter;
                    }
                    ++flagFrCounter;
                }
                if (reset)
                {
                    numFrames = 0;
                    oldPt.Clear();
                    final4P.Clear();
                }
            }
            //pointsb[0]=thresholdDist;
            //****************************************************************************//

            // Draw the tracked corner points on the grey working image, then copy
            // the annotated result back into the (colour) input frame.
            for (int i = 0; i < oldPt.Count; ++i)
            {
                Cv.Circle(temp, oldPt[i], 2, Const.ScalarRed, 3);
            }
            MatOps.Convert8To24(temp).Copy(inputMat);
            //MatOps.ConvertChannels( temp, ColorConversion.GrayToBgr ).Copy( inputMat );
            //temp.Copy( inputMat );



            //******************************************************OVERLAY IMAGE***********************************************//////
            if (oldPt.Count == 0)
            {
                return(inputMat);                // end of line
            }
            CvMat black2;
            if (overlay != null)
            {
                black2 = overlay.Clone();                                   //=imread("cubes.jpg");
                Cv.Resize(black2, inputMat, Interpolation.NearestNeighbor); // TODO : check if interpolation type is appropriate
            }
            else
            {
                black2 = new CvMat(inputMat.Rows, inputMat.Cols, MatrixType.U8C3);
            }

            List <CvPoint> tempPoint = new List <CvPoint>(4);
            //vector<Point> tempPoint;
            int pp = 0;

            // BEWARE : the guy is copy/pasting needlessly?
            // Pick the topmost tracked point (smallest Y)...
            int mini = 1000000;
            for (int i = 0; i < oldPt.Count; ++i)
            {
                if (oldPt[i].Y < mini)
                {
                    mini = oldPt[i].Y;
                    pp   = i;
                }
            }
            tempPoint.Add(oldPt[pp]);
            // ...then the second-topmost point...
            mini = 1000000;
            for (int i = 0; i < oldPt.Count; ++i)
            {
                if (oldPt[i].Y < mini && oldPt[i] != tempPoint[0])
                {
                    mini = oldPt[i].Y;
                    pp   = i;
                }
            }
            tempPoint.Add(oldPt[pp]);
            // ...then the remaining point horizontally closest to the second one.
            mini = 1000000;
            for (int i = 0; i < oldPt.Count; ++i)
            {
                int tempmini = Math.Abs(oldPt[i].X - tempPoint[1].X);
                if (tempmini < mini && oldPt[i] != tempPoint[0] && oldPt[i] != tempPoint[1])
                {
                    mini = tempmini;
                    pp   = i;
                }
            }
            tempPoint.Add(oldPt[pp]);

            // Append any remaining tracked points not already selected above.
            for (int i = 0; i < oldPt.Count; ++i)
            {
                CvPoint pt    = oldPt[i];
                bool    found = false;
                for (int j = 0; j < tempPoint.Count; ++j)
                {
                    if (tempPoint[j] == pt)
                    {
                        found = true; break;
                    }
                }
                if (!found)
                {
                    tempPoint.Add(pt);
                }
            }

            // only keep up to 4 points
            List <CvPoint> co_ordinates = new List <CvPoint>(4);
            {
                int maxIndex = Math.Min(4, tempPoint.Count);
                for (int i = 0; i < maxIndex; ++i)
                {
                    co_ordinates.Add(tempPoint[i]);
                }
            }

            // lost me...
            // If the destination quad is degenerate (corner 0 == corner 2 — presumably
            // "not yet initialized"; confirm), seed it directly from the selected points.
            if (outputQuad[0] == outputQuad[2])
            {
                {
                    int maxIndex = Math.Min(4, tempPoint.Count);
                    for (int i = 0; i < maxIndex; ++i)
                    {
                        outputQuad[i] = tempPoint[i];
                    }
                }
            }
            else
            {
                // Otherwise, snap each quad corner to the nearest candidate point,
                // consuming candidates as they get matched.
                CvPoint2D32f rr;
                for (int i = 0; i < 4; ++i)
                {
                    List <double> dist = new List <double>(tempPoint.Count);
                    for (int j = 0; j < tempPoint.Count; ++j)
                    {
                        rr = tempPoint[j];
                        dist.Add(PointOps.Norm(outputQuad[i] - rr));
                    }

                    double minimumDist = dist.Min();
                    int    min_pos     = Utils.FindIndex(dist, minimumDist);
                    if (tempPoint.Count > 0)
                    {
                        outputQuad[i] = tempPoint[min_pos];
                        tempPoint.RemoveAt(min_pos);
                    }
                }
            }


            // The 4 points where the mapping is to be done , from top-left in clockwise order
            inputQuad[0] = new CvPoint2D32f(0, 0);
            inputQuad[1] = new CvPoint2D32f(inputMat.Cols - 1, 0);
            inputQuad[2] = new CvPoint2D32f(inputMat.Cols - 1, inputMat.Rows - 1);
            inputQuad[3] = new CvPoint2D32f(0, inputMat.Rows - 1);
            //Input and Output Image;


            // Get the Perspective Transform Matrix i.e. lambda (2D warp transform)
            // Lambda Matrix
            CvMat lambda = Cv.GetPerspectiveTransform(inputQuad, outputQuad);
            // Apply this Perspective Transform to the src image
            // - get a "top-down" view of the supposedly box-y area
            Cv.WarpPerspective(black2, black2, lambda, Interpolation.Cubic, Const.ScalarBlack);
            // see nice explanation : http://www.pyimagesearch.com/2014/08/25/4-point-opencv-getperspective-transform-example/


            // Build a mask covering the selected quad so the overlay is only blended inside it.
            CvMat maskOV = new CvMat(inputMat.Rows, inputMat.Cols, MatrixType.U8C1, Const.ScalarBlack);
            using (CvMemStorage storage = new CvMemStorage())
            {
                CvSeq <CvPoint> updateContours = CvSeq <CvPoint> .FromArray(co_ordinates, SeqType.Contour, storage);

                imageDest = new CvMat(inputMat.Rows, inputMat.Cols, MatrixType.U8C1);
                Cv.DrawContours(maskOV, updateContours, Const.ScalarWhite, 0, 100, 16);
                //drawContours( maskOV, co_ordinates, 0, Scalar( 255 ), CV_FILLED, 8 );
            }

            // Alpha-blend the warped overlay (80%) with the frame (20%) and copy the
            // result back into the frame through the quad mask.
            double alpha = 0.8;
            double beta  = (1.0 - alpha);
            Cv.AddWeighted(black2, alpha, inputMat, beta, 0.0, black2);
            black2.Copy(inputMat, maskOV);

            return(inputMat);
        }
    //  Use the CamShift algorithm to track the base histogram throughout the
    // succeeding frames. Updates _rectToTrack, rotatedBoxToTrack and lastPosition,
    // corrects the Kalman filter with the CamShift measurement, and draws the
    // tracking annotations onto _image. Falls back to the Kalman prediction
    // when CamShift loses the object.
    void CalculateCamShift(CvMat _image)
    {
        CvMat _backProject = CalculateBackProjection(_image, _histogramToTrack);

        // Create structuring elements for erosion and dilation.
        // FIX: dispose them deterministically — they wrap native OpenCV
        // structuring elements that previously leaked on every frame.
        using (IplConvKernel elementErode = Cv.CreateStructuringElementEx(10, 10, 5, 5, ElementShape.Rect, null))
        using (IplConvKernel elementDilate = Cv.CreateStructuringElementEx(4, 4, 2, 2, ElementShape.Rect, null))
        {
            // Try eroding and then dilating the back projection
            // Hopefully this will get rid of the noise in favor of the blob objects.
            Cv.Erode(_backProject, _backProject, elementErode, 1);
            Cv.Dilate(_backProject, _backProject, elementDilate, 1);
        }

        if (backprojWindowFlag)
        {
            Cv.ShowImage("Back Projection", _backProject);
        }

        // Parameters returned by Camshift algorithm
        CvBox2D         _outBox;
        CvConnectedComp _connectComp;

        // Set the criteria for the CamShift algorithm
        // Maximum 10 iterations and at least 1 pixel change in centroid
        CvTermCriteria term_criteria = Cv.TermCriteria(CriteriaType.Iteration | CriteriaType.Epsilon, 10, 1);

        // Draw object center based on Kalman filter prediction
        CvMat _kalmanPrediction = _kalman.Predict();

        int predictX = Mathf.FloorToInt((float)_kalmanPrediction.GetReal2D(0, 0));
        int predictY = Mathf.FloorToInt((float)_kalmanPrediction.GetReal2D(1, 0));

        // Run the CamShift algorithm
        if (Cv.CamShift(_backProject, _rectToTrack, term_criteria, out _connectComp, out _outBox) > 0)
        {
            // Use the CamShift estimate of the object center to update the Kalman model
            CvMat _kalmanMeasurement = Cv.CreateMat(2, 1, MatrixType.F32C1);
            // Update Kalman model with raw data from Camshift estimate
            _kalmanMeasurement.Set2D(0, 0, _outBox.Center.X); // Raw X position
            _kalmanMeasurement.Set2D(1, 0, _outBox.Center.Y); // Raw Y position
                                                              //_kalmanMeasurement.Set2D (2, 0, _outBox.Center.X - lastPosition.X);
                                                              //_kalmanMeasurement.Set2D (3, 0, _outBox.Center.Y - lastPosition.Y);

            lastPosition.X = Mathf.FloorToInt(_outBox.Center.X);
            lastPosition.Y = Mathf.FloorToInt(_outBox.Center.Y);

            _kalman.Correct(_kalmanMeasurement); // Correct Kalman model with raw data

            // CamShift function returns two values: _connectComp and _outBox.

            //	_connectComp contains is the newly estimated position and size
            //  of the region of interest. This is passed into the subsequent
            // call to CamShift
            // Update the ROI rectangle with CamShift's new estimate of the ROI
            _rectToTrack = CheckROIBounds(_connectComp.Rect);

            // Draw a rectangle over the tracked ROI
            // This method will draw the rectangle but won't rotate it.
            _image.DrawRect(_rectToTrack, CvColor.Aqua);
            _image.DrawMarker(predictX, predictY, CvColor.Aqua);

            // _outBox contains a rotated rectangle esimating the position, size, and orientation
            // of the object we want to track (specified by the initial region of interest).
            // We then take this estimation and draw a rotated bounding box.
            // This method will draw the rotated rectangle
            rotatedBoxToTrack = _outBox;

            // Draw a rotated rectangle representing Camshift's estimate of the
            // object's position, size, and orientation.
            _image.DrawPolyLine(rectangleBoxPoint(_outBox.BoxPoints()), true, CvColor.Red);
        }
        else
        {
            //Debug.Log ("Object lost by Camshift tracker");

            // Object lost: fall back to the Kalman prediction — recentre the ROI
            // on the predicted point, keeping its previous size.
            _image.DrawMarker(predictX, predictY, CvColor.Purple, MarkerStyle.CircleLine);

            _rectToTrack = CheckROIBounds(new CvRect(predictX - Mathf.FloorToInt(_rectToTrack.Width / 2),
                                                     predictY - Mathf.FloorToInt(_rectToTrack.Height / 2),
                                                     _rectToTrack.Width, _rectToTrack.Height));
            _image.DrawRect(_rectToTrack, CvColor.Purple);
        }

        if (trackWindowFlag)
        {
            Cv.ShowImage("Image", _image);
        }
    }
Exemplo n.º 18
0
        //* TrackBar _ Erode *//
        // Re-applies erosion (or dilation, when the "negative" checkbox is
        // checked) to the cached pre-erosion image, using trackBar_Bold's value
        // (1..5) as the iteration count. Any other slider value restores the
        // unprocessed image. Result is shown in pictureBox.
        //
        // FIX: collapsed ten copy-pasted branches (one per iteration count and
        // mode) into a single parameterized call — the iteration count IS the
        // slider value — and dispose the structuring element (native resource).
        private void trackBar_Erode_Scroll(object sender, EventArgs e)
        {
            using (IplConvKernel element = new IplConvKernel(3, 3, 1, 1, ElementShape.Rect, null))
            {
                int iterations = trackBar_Bold.Value;
                if (iterations >= 1 && iterations <= 5)
                {
                    if (checkBox_isNegative.Checked)
                    {
                        // Negative image: dilation plays the role erosion has on a positive.
                        beforeErode.Dilate(imgBox, element, iterations);
                    }
                    else
                    {
                        beforeErode.Erode(imgBox, element, iterations);
                    }
                }
                else
                {
                    // Slider outside 1..5: show the unmodified source again.
                    beforeErode.Copy(imgBox);
                }
                pictureBox.ImageIpl = imgBox;
            }
        }