예제 #1
0
        /// <summary>
        /// Builds a color debug image: blobs detected in the inspection result
        /// (after masking via paint_black and area filtering) are rendered in
        /// color, overlaid on the template, and scoring marks are drawn on top
        /// via 点数計算_debug.
        /// </summary>
        public void 評価結果画像作成_debug(Mat 検査結果, Mat テンプレート, int[,] 正解座標, ref Mat color_debug)
        {
            Mat res_color    = new Mat(new Size(検査結果.Width, 検査結果.Height), MatType.CV_8UC3, Scalar.All(0));
            var temp_color   = res_color.Clone();
            var result_clone = 検査結果.Clone();

            paint_black(ref result_clone, テンプレート);
            CvBlobs blobs = new CvBlobs(result_clone);

            blobs.FilterByArea(9, 250);
            blobs.RenderBlobs(result_clone, res_color);

            Cv2.CvtColor(テンプレート, temp_color, ColorConversionCodes.GRAY2BGR);
            Cv2.Add(temp_color, res_color, color_debug);

            点数計算_debug(blobs, 正解座標, ref color_debug);

            // Dispose the native Mat buffers explicitly. The original assigned
            // null to these locals, which releases nothing (the unmanaged
            // memory leaked until finalization); this matches the Dispose
            // pattern used by the sibling overload of this method.
            res_color.Dispose();
            temp_color.Dispose();
            result_clone.Dispose();
        }
예제 #2
0
        // Extracts blobs from a grayscale image and returns the centroid of the
        // largest blob (assumed to be the LED). A color rendering of the
        // detected blobs is stored in this.CalibrationImage as a side effect.
        Point GetCenterPointofLED(Mat grayImage)
        {
            OpenCvSharp.CPlusPlus.Point centerPoint = new OpenCvSharp.CPlusPlus.Point();
            IplImage grayIpl  = grayImage.ToIplImage().Clone();
            IplImage calibIpl = new IplImage(grayIpl.Size, BitDepth.U8, 3);
            // Detect the center via blob labeling.
            CvBlobs blobs = new CvBlobs();

            blobs.Label(grayIpl);
            //blobs.FilterByArea(20, 1500);
            CvBlob blob = blobs.LargestBlob();

            try
            {
                if (blob != null)
                {
                    centerPoint = new Point(blob.Centroid.X, blob.Centroid.Y);

                    blobs.RenderBlobs(grayIpl, calibIpl);
                }
            }
            catch
            {
                // Fixed garbled log message (was "eroor:counter").
                Console.WriteLine("error: contour");
            }

            this.CalibrationImage = new Mat(calibIpl);

            // Release the cloned working image (it leaked in the original).
            // calibIpl is intentionally NOT disposed here: the Mat above may
            // share its buffer — TODO confirm Mat(IplImage) copy semantics.
            grayIpl.Dispose();

            Console.WriteLine(centerPoint);
            return(centerPoint);
        }
예제 #3
0
        /// <summary>
        /// Demonstrates blob labeling: thresholds a grayscale image with Otsu's
        /// method, renders the detected blobs (angle, bounding box, color) onto
        /// a BGR copy, and shows both images.
        /// </summary>
        private static void CvBlobsSample()
        {
            var grayscale = new Mat("data/shapes.png", ImreadModes.GrayScale);
            var binarized = grayscale.Threshold(0, 255, ThresholdTypes.Otsu);
            var display   = binarized.CvtColor(ColorConversionCodes.GRAY2BGR);

            var labeled = new CvBlobs(binarized);
            const RenderBlobsMode renderFlags =
                RenderBlobsMode.Angle | RenderBlobsMode.BoundingBox | RenderBlobsMode.Color;
            labeled.RenderBlobs(binarized, display, renderFlags);
            Window.ShowImages(binarized, display);
        }
예제 #4
0
        /// <summary>
        /// Sample: label the blobs of an Otsu-binarized shapes image and
        /// display the labeled rendering next to the binary input.
        /// </summary>
        private static void CvBlobsSample()
        {
            var source = new Mat("data/shapes.png", ImreadModes.GrayScale);
            var binary = source.Threshold(0, 255, ThresholdTypes.Otsu);
            var canvas = binary.CvtColor(ColorConversionCodes.GRAY2BGR);

            var detected = new CvBlobs(binary);

            detected.RenderBlobs(
                binary,
                canvas,
                RenderBlobsMode.Angle | RenderBlobsMode.BoundingBox | RenderBlobsMode.Color);
            Window.ShowImages(binary, canvas);
        }
예제 #5
0
        /// <summary>
        /// Tracks blobs across the frames of a video: each frame is binarized
        /// with Otsu, the 200 largest blobs are kept, rendered, and fed into a
        /// CvTracks instance to maintain track identities between frames.
        /// </summary>
        private static void Track()
        {
            using (var video = new CvCapture("data/bach.mp4"))
            {
                IplImage frame        = null;
                IplImage gray         = null;
                IplImage binary       = null;
                IplImage render       = null;
                IplImage renderTracks = null;
                CvTracks tracks       = new CvTracks();
                CvWindow window       = new CvWindow("render");
                CvWindow windowTracks = new CvWindow("tracks");

                for (int i = 0; ; i++)
                {
                    frame = video.QueryFrame();
                    // Bug fix: QueryFrame returns null at end of stream; the
                    // original's null guard was commented out, so frame.Size /
                    // CvtColor below threw NullReferenceException instead of
                    // terminating the loop.
                    if (frame == null)
                        break;
                    if (gray == null)
                    {
                        // Lazily allocate working buffers once the frame size is known.
                        gray         = new IplImage(frame.Size, BitDepth.U8, 1);
                        binary       = new IplImage(frame.Size, BitDepth.U8, 1);
                        render       = new IplImage(frame.Size, BitDepth.U8, 3);
                        renderTracks = new IplImage(frame.Size, BitDepth.U8, 3);
                    }

                    render.Zero();
                    renderTracks.Zero();

                    Cv.CvtColor(frame, gray, ColorConversion.BgrToGray);
                    Cv.Threshold(gray, binary, 0, 255, ThresholdType.Otsu);

                    // Keep only the 200 largest blobs to bound tracking cost.
                    CvBlobs blobs    = new CvBlobs(binary);
                    CvBlobs newBlobs = new CvBlobs(blobs
                                                   .OrderByDescending(pair => pair.Value.Area)
                                                   .Take(200)
                                                   .ToDictionary(pair => pair.Key, pair => pair.Value), blobs.Labels);
                    newBlobs.RenderBlobs(binary, render);
                    window.ShowImage(render);

                    newBlobs.UpdateTracks(tracks, 10.0, Int32.MaxValue);
                    tracks.Render(binary, renderTracks);
                    windowTracks.ShowImage(renderTracks);

                    Cv.WaitKey(200);
                    Console.WriteLine(i);
                }
            }
        }
예제 #6
0
        /// <summary>
        /// Smoke test: binarize a shapes image with Otsu, render its labeled
        /// blobs onto a color canvas, and display the result until a key press.
        /// </summary>
        public void SimpleTest()
        {
            using (var src = new IplImage(@"_data\image\Blob\shapes2.png", LoadMode.GrayScale))
            using (var binary = new IplImage(src.Size, BitDepth.U8, 1))
            using (var render = new IplImage(src.Size, BitDepth.U8, 3))
            {
                Cv.Threshold(src, binary, 0, 255, ThresholdType.Otsu);

                var labeled = new CvBlobs(binary);
                labeled.RenderBlobs(src, render);

                using (new CvWindow(render))
                {
                    Cv.WaitKey();
                }
            }
        }
예제 #7
0
        /// <summary>
        /// Smoke test: threshold a grayscale shapes image (Otsu), label the
        /// resulting blobs, render them over the source, and show the window.
        /// </summary>
        public void SimpleTest()
        {
            using (var source = new IplImage(@"Image\Blob\shapes2.png", LoadMode.GrayScale))
            using (var thresholded = new IplImage(source.Size, BitDepth.U8, 1))
            using (var rendering = new IplImage(source.Size, BitDepth.U8, 3))
            {
                Cv.Threshold(source, thresholded, 0, 255, ThresholdType.Otsu);

                var blobSet = new CvBlobs(thresholded);
                blobSet.RenderBlobs(source, rendering);

                using (new CvWindow(rendering))
                {
                    Cv.WaitKey();
                }
            }
        }
예제 #8
0
    // Update is called once per frame: grabs a camera frame, labels its blobs,
    // draws contours and polygon vertices, and copies the blob rendering into
    // the Unity texture.
    void Update()
    {
        IplImage frame = Cv.QueryFrame(capture);

        // NOTE(review): these field images are reallocated every frame and the
        // previous instances are never disposed — consider allocating them once
        // or disposing the old images here to avoid a steady native-memory leak.
        imgBinary  = new IplImage(frame.Size, BitDepth.U8, 1);
        imgLabel   = new IplImage(frame.Size, BitDepth.F32, 1);
        imgRender  = new IplImage(frame.Size, BitDepth.U8, 3);
        imgContour = new IplImage(frame.Size, BitDepth.U8, 3);
        imgPolygon = new IplImage(frame.Size, BitDepth.U8, 3);
        Color[] cols = new Color[texture.width * texture.height];
        Cv.CvtColor(frame, imgBinary, ColorConversion.BgrToGray);
        Cv.Threshold(imgBinary, imgBinary, 100, 255, ThresholdType.Binary);
        CvBlobs blobs = new CvBlobs();
        // The label count returned by Label() was stored in an unused local in
        // the original; discard it.
        blobs.Label(imgBinary, imgLabel);

        foreach (KeyValuePair <uint, CvBlob> item in blobs)
        {
            CvBlob b = item.Value;
            //Console.WriteLine ("{0} | Centroid:{1} Area:{2}", item.Key, b.Centroid, b.Area);

            // Draw the blob outline from its chain code, then mark each vertex
            // of the converted polygon in red.
            CvContourChainCode cc = b.Contour;
            cc.RenderContourChainCode(imgContour);

            CvContourPolygon polygon = cc.ConvertChainCodesToPolygon();
            foreach (CvPoint p in polygon)
            {
                imgPolygon.Circle(p, 1, CvColor.Red, -1);
            }
        }

        blobs.RenderBlobs(imgLabel, frame, imgRender);

        // Copy the rendered pixels into the texture's color buffer.
        for (int y = 0; y < texture.height; y++)
        {
            for (int x = 0; x < texture.width; x++)
            {
                CvColor col = imgRender.Get2D(y, x);
                cols[y * texture.width + x] = new Color(col.R / 255.0f, col.G / 255.0f, col.B / 255.0f, 1.0f);
            }
        }
        // int t2 = System.Environment.TickCount;
        texture.SetPixels(cols);
        //int t3 = System.Environment.TickCount;
        //Debug.Log("t2-t1=" + (t2 - t1) + " t3-t2=" + (t3 - t2));
        texture.Apply();
    }
예제 #9
0
파일: Blob.cs 프로젝트: 0sv/opencvsharp
        /// <summary>
        /// Blob demo: binarizes a color shapes image, labels its blobs, prints
        /// each blob's centroid and area, draws contours and polygon vertices,
        /// and displays the three renderings until a key is pressed.
        /// </summary>
        public Blob()
        {
            using (var source = new IplImage(FilePath.Image.Shapes, LoadMode.Color))
            using (var binarized = new IplImage(source.Size, BitDepth.U8, 1))
            using (var blobCanvas = new IplImage(source.Size, BitDepth.U8, 3))
            using (var contourCanvas = new IplImage(source.Size, BitDepth.U8, 3))
            using (var vertexCanvas = new IplImage(source.Size, BitDepth.U8, 3))
            {
                // Binarize: grayscale then fixed threshold at 100.
                Cv.CvtColor(source, binarized, ColorConversion.BgrToGray);
                Cv.Threshold(binarized, binarized, 100, 255, ThresholdType.Binary);

                var labeled = new CvBlobs();
                labeled.Label(binarized);

                foreach (KeyValuePair<int, CvBlob> entry in labeled)
                {
                    CvBlob currentBlob = entry.Value;
                    Console.WriteLine("{0} | Centroid:{1} Area:{2}", entry.Key, currentBlob.Centroid, currentBlob.Area);

                    // Outline via chain code, then mark each polygon vertex in red.
                    CvContourChainCode chainCode = currentBlob.Contour;
                    chainCode.Render(contourCanvas);

                    CvContourPolygon vertices = chainCode.ConvertToPolygon();
                    foreach (CvPoint vertex in vertices)
                    {
                        vertexCanvas.Circle(vertex, 1, CvColor.Red, -1);
                    }
                }

                labeled.RenderBlobs(source, blobCanvas);

                using (new CvWindow("render", blobCanvas))
                using (new CvWindow("contour", contourCanvas))
                using (new CvWindow("polygon vertices", vertexCanvas))
                {
                    Cv.WaitKey(0);
                }
            }
        }
예제 #10
0
        /// <summary>
        /// Blob demo: thresholds a color shapes image, labels its blobs, logs
        /// each centroid/area pair, renders contours and polygon vertices, then
        /// shows three windows until a key press.
        /// </summary>
        public Blob()
        {
            using (var imgSrc = new IplImage(FilePath.Image.Shapes, LoadMode.Color))
            using (var imgBinary = new IplImage(imgSrc.Size, BitDepth.U8, 1))
            using (var imgRender = new IplImage(imgSrc.Size, BitDepth.U8, 3))
            using (var imgContour = new IplImage(imgSrc.Size, BitDepth.U8, 3))
            using (var imgPolygon = new IplImage(imgSrc.Size, BitDepth.U8, 3))
            {
                Cv.CvtColor(imgSrc, imgBinary, ColorConversion.BgrToGray);
                Cv.Threshold(imgBinary, imgBinary, 100, 255, ThresholdType.Binary);

                var blobs = new CvBlobs();
                blobs.Label(imgBinary);

                foreach (var pair in blobs)
                {
                    var blob = pair.Value;
                    Console.WriteLine("{0} | Centroid:{1} Area:{2}", pair.Key, blob.Centroid, blob.Area);

                    var chain = blob.Contour;
                    chain.Render(imgContour);

                    // Mark every vertex of the contour polygon in red.
                    foreach (CvPoint vertex in chain.ConvertToPolygon())
                    {
                        imgPolygon.Circle(vertex, 1, CvColor.Red, -1);
                    }
                }

                blobs.RenderBlobs(imgSrc, imgRender);

                using (new CvWindow("render", imgRender))
                using (new CvWindow("contour", imgContour))
                using (new CvWindow("polygon vertices", imgPolygon))
                {
                    Cv.WaitKey(0);
                }
            }
        }
예제 #11
0
        /// <summary>
        /// Legacy blob demo using an explicit F32 label image: labels blobs in
        /// the binarized shapes image, logs each blob's centroid and area,
        /// draws chain-code contours and polygon vertices, and displays the
        /// renderings.
        /// </summary>
        public BlobOld()
        {
            using (IplImage imgSrc = new IplImage(Const.ImageShapes, LoadMode.Color))
            using (IplImage imgBinary = new IplImage(imgSrc.Size, BitDepth.U8, 1))
            using (IplImage imgLabel = new IplImage(imgSrc.Size, BitDepth.F32, 1))
            using (IplImage imgRender = new IplImage(imgSrc.Size, BitDepth.U8, 3))
            using (IplImage imgContour = new IplImage(imgSrc.Size, BitDepth.U8, 3))
            using (IplImage imgPolygon = new IplImage(imgSrc.Size, BitDepth.U8, 3))
            {
                Cv.CvtColor(imgSrc, imgBinary, ColorConversion.BgrToGray);
                Cv.Threshold(imgBinary, imgBinary, 100, 255, ThresholdType.Binary);

                using (CvBlobs labeledBlobs = new CvBlobs())
                {
                    // The label count returned by Label() is not needed here.
                    labeledBlobs.Label(imgBinary, imgLabel);

                    foreach (KeyValuePair<uint, CvBlob> entry in labeledBlobs)
                    {
                        CvBlob currentBlob = entry.Value;
                        Console.WriteLine("{0} | Centroid:{1} Area:{2}", entry.Key, currentBlob.Centroid, currentBlob.Area);

                        CvContourChainCode chain = currentBlob.Contour;
                        chain.RenderContourChainCode(imgContour);

                        CvContourPolygon poly = chain.ConvertChainCodesToPolygon();
                        foreach (CvPoint vertex in poly)
                        {
                            imgPolygon.Circle(vertex, 1, CvColor.Red, -1);
                        }
                    }

                    labeledBlobs.RenderBlobs(imgLabel, imgSrc, imgRender);

                    using (new CvWindow("render", imgRender))
                    using (new CvWindow("contour", imgContour))
                    using (new CvWindow("polygon vertices", imgPolygon))
                    {
                        Cv.WaitKey(0);
                    }
                }
            }
        }
예제 #12
0
        /// <summary>
        /// Detects blobs on a cropped carbon-paper region: grayscale → ROI crop
        /// → threshold → blob labeling with an area filter → colored rendering
        /// on a black canvas, with intermediate images drawn via Glb helpers.
        /// </summary>
        public static void CarbonPaper(int x1 = 100, int y1 = 300, int x2 = 1100, int y2 = 1600, ThresholdTypes thrType = ThresholdTypes.Binary, int thr = 128, int filterArea = 30)
        {
            // 1. convert to grayscale
            var grayscale = Glb.matSrc.CvtColor(ColorConversionCodes.BGR2GRAY);

            // 2. crop the region of interest (corner coordinates are inclusive)
            Rect cropRect = new Rect(x1, y1, x2 - x1 + 1, y2 - y1 + 1);
            var roiOutline = Glb.matSrc.Clone();
            roiOutline.Rectangle(cropRect, Scalar.Yellow);
            Glb.DrawMat0(roiOutline);

            var cropped = new Mat(grayscale, cropRect);
            Glb.DrawHist0(cropped);

            // 3. threshold
            var thresholded = cropped.Threshold(thr, 255, thrType);
            Glb.DrawMatAndHist1(thresholded);

            // 4. label blobs and drop those smaller than filterArea
            CvBlobs detected = new CvBlobs();
            detected.Label(thresholded);
            detected.FilterByArea(filterArea, int.MaxValue);

            // 5. render the surviving blobs in color on a black canvas
            var canvas = new Mat(cropped.Rows, cropped.Cols, MatType.CV_8UC3);
            canvas.SetTo(Scalar.Black);
            detected.RenderBlobs(canvas, canvas, RenderBlobsModes.Color);
            Glb.DrawMatAndHist2(canvas);

            Console.WriteLine("blobs.cnt = {0}", detected.Count);

            // Release intermediate Mats.
            grayscale.Dispose();
            roiOutline.Dispose();
            cropped.Dispose();
            thresholded.Dispose();
            canvas.Dispose();
        }
예제 #13
0
        /// <summary>
        /// Labels blobs in the binarized source image, renders them into the
        /// <c>blob</c> field image, and stamps each blob's label number at its
        /// centroid in red.
        /// </summary>
        /// <param name="src">Input image; binarized internally via Binary(src, 50).</param>
        /// <returns>The rendered blob image (also stored in the blob field).</returns>
        public IplImage BlobImage(IplImage src)
        {
            blob = new IplImage(src.Size, BitDepth.U8, 3);
            bin  = this.Binary(src, 50);

            CvBlobs labeled = new CvBlobs();
            labeled.Label(bin);
            labeled.RenderBlobs(src, blob);

            foreach (KeyValuePair <int, CvBlob> entry in labeled)
            {
                CvBlob current = entry.Value;

                Cv.PutText(blob, Convert.ToString(current.Label), current.Centroid, new CvFont(FontFace.HersheyComplex, 1, 1), CvColor.Red);
            }

            return(blob);
        }
예제 #14
0
        /// <summary>
        /// Builds a color debug image for a scoring run: blobs detected in the
        /// inspection result are area-filtered (bounds from Main.FilterByArea),
        /// rendered in color, overlaid on the template, and score annotations
        /// are drawn on top via 点数計算_debug.
        /// </summary>
        public void 評価結果画像作成_debug(Mat 検査結果, Mat テンプレート, int[,] 正解座標, ref Mat color_debug)
        {
            Mat res_color  = new Mat(new Size(検査結果.Width, 検査結果.Height), MatType.CV_8UC3, Scalar.All(0));
            var temp_color = res_color.Clone();

            CvBlobs blobs = new CvBlobs(検査結果);
            int     score = 0;

            blobs.FilterByArea(Main.FilterByArea[0], Main.FilterByArea[1]);
            blobs.RenderBlobs(検査結果, res_color);

            Cv2.CvtColor(テンプレート, temp_color, ColorConversionCodes.GRAY2BGR);
            Cv2.Add(temp_color, res_color, color_debug);

            点数計算_debug(blobs, 正解座標, ref color_debug, ref score);

            // Release the temporary Mats. The pointless `blobs = null` on a
            // local about to go out of scope was dropped (it freed nothing).
            res_color.Dispose();
            temp_color.Dispose();
        }
예제 #15
0
        /// <summary>
        /// Blob demo: binarizes the shapes image, labels its blobs, prints each
        /// blob's centroid and area, draws chain-code contours and polygon
        /// vertices, and shows the three renderings until a key is pressed.
        /// </summary>
        public Blob()
        {
            using (IplImage imgSrc = new IplImage(Const.ImageShapes, LoadMode.Color))
            using (IplImage imgBinary = new IplImage(imgSrc.Size, BitDepth.U8, 1))
            using (IplImage imgLabel = new IplImage(imgSrc.Size, BitDepth.F32, 1)) // allocated but not used below
            using (IplImage imgRender = new IplImage(imgSrc.Size, BitDepth.U8, 3))
            using (IplImage imgContour = new IplImage(imgSrc.Size, BitDepth.U8, 3))
            using (IplImage imgPolygon = new IplImage(imgSrc.Size, BitDepth.U8, 3))
            {
                Cv.CvtColor(imgSrc, imgBinary, ColorConversion.BgrToGray);
                Cv.Threshold(imgBinary, imgBinary, 100, 255, ThresholdType.Binary);

                CvBlobs detected = new CvBlobs();
                detected.Label(imgBinary);

                foreach (KeyValuePair <int, CvBlob> kv in detected)
                {
                    CvBlob b = kv.Value;
                    Console.WriteLine("{0} | Centroid:{1} Area:{2}", kv.Key, b.Centroid, b.Area);

                    CvContourChainCode contour = b.Contour;
                    contour.Render(imgContour);

                    foreach (CvPoint pt in contour.ConvertToPolygon())
                    {
                        imgPolygon.Circle(pt, 1, CvColor.Red, -1);
                    }
                }

                detected.RenderBlobs(imgSrc, imgRender);

                using (new CvWindow("render", imgRender))
                using (new CvWindow("contour", imgContour))
                using (new CvWindow("polygon vertices", imgPolygon))
                {
                    Cv.WaitKey(0);
                }
            }
        }
예제 #16
0
        /// <summary>
        /// This is the method that actually does the work: loads an image,
        /// extracts red/green/blue strokes via HSV thresholding, converts each
        /// color's blobs into lines, welds them into a network, detects cycles
        /// as faces, and outputs vertices, per-color lines, and a mesh.
        /// </summary>
        /// <param name="DA">The DA object can be used to retrieve data from input parameters and
        /// to store data in output parameters.</param>
        protected override void SolveInstance(IGH_DataAccess DA)

        {
            string path    = "";
            int    size    = 0;
            // NOTE(review): `debug` is never assigned from DA, so the rendering
            // branch below is currently unreachable — confirm whether a "debug"
            // input parameter is missing.
            bool   debug   = false;
            int    maxNgon = 3;
            double tol     = -1;

            if (!DA.GetData(0, ref path))
            {
                return;
            }
            if (!DA.GetData(1, ref size))
            {
                return;
            }
            DA.GetData(2, ref maxNgon);
            DA.GetData(3, ref tol);

            IplImage img    = new IplImage(path, LoadMode.Color);
            IplImage imgHSV = new IplImage(img.Size, BitDepth.U8, 3);

            // NOTE(review): the image is loaded as BGR but converted with
            // RgbToHsv — confirm the intended channel order.
            Cv.CvtColor(img, imgHSV, ColorConversion.RgbToHsv);

            var      channels = imgHSV.Split();
            IplImage hue      = channels[0];



            IplImage Render = new IplImage(img.Size, BitDepth.U8, 3);

            // Images used as color-extraction masks.
            IplImage imgB1 = new IplImage(img.Size, BitDepth.U8, 1);
            IplImage imgB2 = new IplImage(img.Size, BitDepth.U8, 1);
            IplImage imgR  = new IplImage(img.Size, BitDepth.U8, 1);
            IplImage imgG  = new IplImage(img.Size, BitDepth.U8, 1);
            IplImage imgB  = new IplImage(img.Size, BitDepth.U8, 1);

            // Hue band boundaries (OpenCV hue range is 0-180) and a small
            // offset to keep the bands disjoint.
            int RG  = 30;
            int GB  = 90;
            int BR  = 150;
            int off = 1;

            // Minimum saturation / brightness for a pixel to count as colored.
            int smin = 30;
            int bmin = 30;

            // Thresholds for color extraction; the first band wraps around the
            // hue axis, so it needs two ranges.
            CvScalar Bmin1 = new CvScalar(0, smin, bmin);
            CvScalar Bmax1 = new CvScalar(RG - off, 255, 255);

            CvScalar Bmin2 = new CvScalar(BR + off, smin, bmin);
            CvScalar Bmax2 = new CvScalar(180, 255, 255);

            CvScalar Gmin = new CvScalar(RG + off, smin, bmin);
            CvScalar Gmax = new CvScalar(GB - off, 255, 255);

            CvScalar Rmin = new CvScalar(GB + off, smin, bmin);
            CvScalar Rmax = new CvScalar(BR - off, 255, 255);

            // Extract each color band using the thresholds.
            Cv.InRangeS(imgHSV, Bmin1, Bmax1, imgB1);
            Cv.InRangeS(imgHSV, Bmin2, Bmax2, imgB2);
            Cv.Add(imgB1, imgB2, imgB);
            Cv.InRangeS(imgHSV, Gmin, Gmax, imgG);
            Cv.InRangeS(imgHSV, Rmin, Rmax, imgR);


            // Label blobs in each color mask.
            CvBlobs Rs = new CvBlobs(imgR);
            CvBlobs Gs = new CvBlobs(imgG);
            CvBlobs Bs = new CvBlobs(imgB);

            // Drop blobs smaller than 1/20000 of the image area.
            int minArea = img.Width * img.Height / 20000;
            int maxArea = img.Width * img.Height;

            Bs.FilterByArea(minArea, maxArea);
            Rs.FilterByArea(minArea, maxArea);
            Gs.FilterByArea(minArea, maxArea);


            // Copy the blobs into plain arrays.
            CvBlob[] Rblobs = new CvBlob[Rs.Count];
            CvBlob[] Bblobs = new CvBlob[Bs.Count];
            CvBlob[] Gblobs = new CvBlob[Gs.Count];
            Rs.Values.CopyTo(Rblobs, 0);
            Bs.Values.CopyTo(Bblobs, 0);
            Gs.Values.CopyTo(Gblobs, 0);


            if (!debug)
            {
                string deb = "";

                foreach (var bbbb in Rblobs)
                {
                    deb += bbbb.Area + "\r\n";
                }

                // Convert each color's blobs into lines.
                List <Line> Rlines = ExtractLinesFromBlobs(Rblobs);
                List <Line> Blines = ExtractLinesFromBlobs(Bblobs);
                List <Line> Glines = ExtractLinesFromBlobs(Gblobs);

                // Scale so the image's short side maps to `size`.
                double MinSize     = Math.Min(img.Width, img.Height);
                double ScaleFactor = (double)size / MinSize;
                var    scale       = Transform.Scale(new Point3d(0, 0, 0), ScaleFactor);

                Network network = new Network();

                // Add the lines to the network, labeled by color.
                foreach (var l in Rlines)
                {
                    l.Transform(scale);
                    network.Add(l, 0);
                }
                foreach (var l in Blines)
                {
                    l.Transform(scale);
                    network.Add(l, 1);
                }
                foreach (var l in Glines)
                {
                    l.Transform(scale);
                    network.Add(l, 2);
                }

                // Weld endpoints: use the caller-supplied tolerance when given,
                // otherwise the binary-searched one.
                double t = network.SearchWeldToleranceBinary(0, (double)size / 10, 0, 10);
                if (tol != -1)
                {
                    network.weld(tol * size);
                }
                else
                {
                    network.weld(t);
                }

                deb += "tolerance: " + t + "\r\n\r\n";

                // Extract the per-color edges after welding.
                Rlines = network.ExtractLines(0);
                Blines = network.ExtractLines(1);
                Glines = network.ExtractLines(2);

                List <List <int> > faces = network.detectCycles(maxNgon);


                deb += "B: " + Bs.Count.ToString() + "\r\n";
                deb += "R: " + Rs.Count.ToString() + "\r\n";
                deb += "G: " + Gs.Count.ToString() + "\r\n";


                Mesh mesh = GenerateMesh(network.verts, faces);
                mesh.Normals.ComputeNormals();

                DA.SetDataList(0, network.verts);
                DA.SetDataList(1, Rlines);
                DA.SetDataList(2, Blines);
                DA.SetDataList(3, Glines);
                DA.SetData(4, mesh);
            }
            else
            {
                // Render red blobs.
                Rs.RenderBlobs(img, Render, RenderBlobsMode.Angle);
                Rs.RenderBlobs(img, Render, RenderBlobsMode.BoundingBox);
                Rs.RenderBlobs(img, Render, RenderBlobsMode.Centroid);

                // Render blue blobs.
                Bs.RenderBlobs(img, Render, RenderBlobsMode.Angle);
                Bs.RenderBlobs(img, Render, RenderBlobsMode.BoundingBox);
                Bs.RenderBlobs(img, Render, RenderBlobsMode.Centroid);

                // Render green blobs (the original comment said "black").
                Gs.RenderBlobs(img, Render, RenderBlobsMode.Angle);
                Gs.RenderBlobs(img, Render, RenderBlobsMode.BoundingBox);
                Gs.RenderBlobs(img, Render, RenderBlobsMode.Centroid);

                Cv.NamedWindow("test");
                IplImage Render2 = new IplImage(img.Size.Width / 4, img.Size.Height / 4, BitDepth.U8, 3);

                string deb = "";
                deb += "B: " + Bs.Count.ToString() + "\r\n";
                deb += "R: " + Rs.Count.ToString() + "\r\n";
                deb += "G: " + Gs.Count.ToString() + "\r\n";

                Cv.Resize(Render, Render2);
                Cv.ShowImage("test", Render2);

                Cv.WaitKey();

                Cv.DestroyWindow("test");
                // Leak fix: Render2 was never released in the original.
                Cv.ReleaseImage(Render2);
            }

            // Leak fix: release every working image (the original released only
            // img and imgHSV). Releasing the channel array also releases `hue`.
            foreach (var channel in channels)
            {
                Cv.ReleaseImage(channel);
            }
            Cv.ReleaseImage(Render);
            Cv.ReleaseImage(imgB1);
            Cv.ReleaseImage(imgB2);
            Cv.ReleaseImage(imgR);
            Cv.ReleaseImage(imgG);
            Cv.ReleaseImage(imgB);
            Cv.ReleaseImage(img);
            Cv.ReleaseImage(imgHSV);
        }
예제 #17
0
        /// <summary>
        /// Edge-based preprocessing for blob extraction: Gaussian-smooths the
        /// grayscale input, runs Canny edge detection, applies a black-hat
        /// morphology with a wide 7x1 kernel, thresholds with Binary|Otsu,
        /// labels the blobs and keeps those with area in [1550, 4850].
        /// Intermediate images are released in the finally block.
        /// </summary>
        /// <param name="mainSubImage">Color sub-image; used to size the label image and as the blob-rendering source.</param>
        /// <param name="imgGray">Grayscale input; cloned, so the original is not modified.</param>
        /// <returns>The area-filtered blob collection (may be null if an exception occurred before labeling).</returns>
        private CvBlobs PreProcessImage1(IplImage mainSubImage, IplImage imgGray)
        {
            CvBlobs  blobs    = null;
            IplImage tmpImage = null;
            IplImage gray     = null;
            IplImage tgray    = null;
            IplImage labelImg = null;
            IplImage temp     = null;

            try
            {
                tgray = imgGray.Clone();
                gray  = new IplImage(tgray.Size, tgray.Depth, 1);
                Cv.Smooth(tgray, tgray, SmoothType.Gaussian);
                Cv.Canny(tgray, gray, 500, 2, ApertureSize.Size5);
                temp = gray.Clone();
                //IplConvKernel element = Cv.CreateStructuringElementEx(5, 1, 3, 0, ElementShape.Rect, null);
                // Wide flat (7x1) rectangular kernel for the black-hat step.
                // NOTE(review): the kernel is never released — confirm whether
                // IplConvKernel needs an explicit release here.
                IplConvKernel element = Cv.CreateStructuringElementEx(7, 1, 3, 0, ElementShape.Rect, null);
                Cv.MorphologyEx(gray, gray, temp, element, MorphologyOperation.BlackHat, 1);
                Cv.Threshold(gray, gray, 100, 255, ThresholdType.Binary | ThresholdType.Otsu);
                Cv.Smooth(gray, gray, SmoothType.Gaussian);


                labelImg = new IplImage(mainSubImage.Size, CvBlobLib.DepthLabel, 1);
                blobs    = new CvBlobs();
                CvBlobLib.Label(gray, labelImg, blobs);
                CvBlobLib.FilterByArea(blobs, 1550, 4850);

                // NOTE(review): tmpImage is rendered into below but released in
                // the finally block without being used — confirm whether this
                // rendering is dead code left over from the commented-out
                // tracking/ROI logic.
                tmpImage = mainSubImage.Clone();
                //CvTracks tracks = new CvTracks();
                //CvBlobLib.UpdateTracks(blobs, tracks, 200.0, 5);
                //CvBlobLib.RenderTracks(tracks, tmpImage, tmpImage, RenderTracksMode.ID);
                blobs.RenderBlobs(labelImg, mainSubImage, tmpImage, RenderBlobsMode.BoundingBox | RenderBlobsMode.Angle);

                /*
                 * img.SetROI(subImageRect);
                 * Cv.Copy(tmpImage, img);
                 * img.ResetROI();
                 * Cv.ReleaseImage(tmpImage);
                 *
                 */
            }
            finally
            {
                // Release every intermediate image regardless of success.
                if (null != temp)
                {
                    Cv.ReleaseImage(temp);
                }

                if (null != tgray)
                {
                    Cv.ReleaseImage(tgray);
                }

                if (null != gray)
                {
                    Cv.ReleaseImage(gray);
                }

                if (null != labelImg)
                {
                    Cv.ReleaseImage(labelImg);
                }

                if (null != tmpImage)
                {
                    Cv.ReleaseImage(tmpImage);
                }
            }

            return(blobs);
        }
예제 #18
0
        /// <summary>
        /// Takes blobs information based on colors in <see cref="hsv"/> list and then sends the info through UDP.
        /// </summary>
        /// <param name="sourceImage">Image in Mat format.</param>
        /// <returns>Image in Mat format.</returns>
        private Mat Renderer(Mat sourceImage)
        {
            Mat dstNoisy = src;
            Mat dstClear = new Mat();
            Mat dst      = new Mat();
            Mat element  = Cv2.GetStructuringElement(MorphShapes.Rect, new Size(2 * MorphValue + 1, 2 * MorphValue + 1));

            Cv2.Blur(dstNoisy, dstClear, new Size(9, 9));

            Cv2.CvtColor(dstClear, dst, ColorConversionCodes.BGR2HSV); // Convert BGR to HSV.

            Mat dstThreshed = new Mat();
            Mat dstPreview  = new Mat();

            if (hsv.Count > 0)
            {
                int  blobCount = 1;
                bool theFirst  = true;

                foreach (int[] scal in hsv)
                {
                    if (theFirst)
                    {
                        Cv2.InRange(dst, new Scalar(scal[0] - 10, scal[1], scal[3]), new Scalar(scal[0] + 10, scal[2], scal[4]), dstPreview);
                        theFirst = false;
                    }
                    else
                    {
                        Mat dstPreview2 = new Mat();
                        Cv2.InRange(dst, new Scalar(scal[0] - 10, scal[1], scal[3]), new Scalar(scal[0] + 10, scal[2], scal[4]), dstPreview2);
                        Cv2.AddWeighted(dstThreshed, 1.0, dstPreview2, 1.0, 0.0, dstPreview);
                    }
                    Cv2.InRange(dst, new Scalar(scal[0] - 10, scal[1], scal[3]), new Scalar(scal[0] + 10, scal[2], scal[4]), dstThreshed);

                    // Morphologic transformation to close the gaps inside the blob.
                    Cv2.MorphologyEx(src: dstThreshed,
                                     dst: dstThreshed,
                                     op: MorphTypes.Close,
                                     element: element
                                     );

                    blobDetection.Label(dstThreshed);
                    blobDetection.FilterByArea(MinBlobArea, MaxBlobArea);
                    blobDetection.RenderBlobs(dstThreshed, src);
                    CircleSegment[] circles = Cv2.HoughCircles(dstThreshed, HoughMethods.Gradient, 1, dstThreshed.Rows / 8);

                    // Creates all udp datagrams----------------------------------------------------
                    if (blobDetection.Count != 0)
                    {
                        for (int i = 0; i < blobDetection.Count; i++)
                        {
                            int processKey = blobDetection.ElementAt(i).Key;
                            udpDatagram_1 = "[$]tracking|id=" + data_id + "|label=" + blobCount + "|[$$]" + deviceName + ",[$$$]mesh,sample,";
                            for (int j = 0; j < blobDetection[processKey].Contour.ConvertToPolygon().Simplify(1).Count; j++)
                            {
                                if (orientation)
                                {
                                    udpDatagram_1 += Math.Round((float)blobDetection[processKey].Contour.ConvertToPolygon().Simplify(1).ElementAt(j).X / dst.Cols, 4).ToString().Replace(',', '.');
                                    udpDatagram_1 += "," + (1 - Math.Round((float)blobDetection[processKey].Contour.ConvertToPolygon().Simplify(1).ElementAt(j).Y / dst.Rows, 4)).ToString().Replace(',', '.');
                                    udpDatagram_1 += ",";
                                }
                                else
                                {
                                    udpDatagram_1 += (1 - Math.Round((float)blobDetection[processKey].Contour.ConvertToPolygon().Simplify(1).ElementAt(j).X / dst.Cols, 4)).ToString().Replace(',', '.');
                                    udpDatagram_1 += "," + Math.Round((float)blobDetection[processKey].Contour.ConvertToPolygon().Simplify(1).ElementAt(j).Y / dst.Rows, 4).ToString().Replace(',', '.');
                                    udpDatagram_1 += ",";
                                }
                            }
                            udpDatagram_1 += ";";
                            udpDatagram_2  = "[$]tracking|id=" + data_id + "|label=" + blobCount + "|[$$]" + deviceName + ",[$$$]area,";
                            udpDatagram_2 += "value," + blobDetection[processKey].Contour.ConvertToPolygon().Simplify(1).Area().ToString().Replace(',', '.') + ";";
                            udpDatagram_3  = "[$]tracking|id=" + data_id + "|label=" + blobCount + "|[$$]" + deviceName + ",[$$$]place,";
                            if (orientation)
                            {
                                udpDatagram_3 += "position," + (Math.Round(blobDetection[processKey].Centroid.X / dst.Cols, 3)).ToString().Replace(',', '.') + "," + (Math.Round(1 - (blobDetection[processKey].Centroid.Y / dst.Rows), 3)).ToString().Replace(',', '.') + ";";
                            }
                            else
                            {
                                udpDatagram_3 += "position," + (Math.Round(1 - (blobDetection[processKey].Centroid.X / dst.Cols), 3)).ToString().Replace(',', '.') + "," + (Math.Round(blobDetection[processKey].Centroid.Y / dst.Rows, 3)).ToString().Replace(',', '.') + ";";
                            }
                            udpDatagram_4  = "[$]tracking|id=" + data_id + "|label=" + blobCount + "|[$$]" + deviceName + ",[$$$]color,";
                            udpDatagram_4 += "hsv," + scal[0] + "-" + (scal[1] + scal[2]) / 2 + "-" + (scal[3] + scal[4]) / 2 + ";";

                            //Geometry
                            udpDatagram_5 = "[$]tracking|id=" + data_id + "|label=" + blobCount + "|[$$]" + deviceName + ",[$$$]form,geometry,";
                            CvContourPolygon poly          = blobDetection[processKey].Contour.ConvertToPolygon();
                            double           epsilon       = 0.04 * Cv2.ArcLength(poly, true);
                            Point[]          counterResult = Cv2.ApproxPolyDP(poly, epsilon, closed: true);
                            int    contourSimple_counter   = counterResult.Length;
                            string geometry = "";
                            switch (contourSimple_counter)
                            {
                            case 3:
                                geometry = "triangle";
                                break;

                            case 4:
                                Rect  rect        = Cv2.BoundingRect(poly);
                                float aspectRatio = 0;
                                if (rect.Y != 0)
                                {
                                    aspectRatio = rect.X / rect.Y;
                                }
                                if (aspectRatio >= 0.95 && aspectRatio <= 1.05)
                                {
                                    geometry = "square";
                                }
                                else
                                {
                                    geometry = "rectangle";
                                }
                                break;

                            default:
                                geometry = "unidentified" + contourSimple_counter;
                                break;
                            }
                            udpDatagram_5 += geometry + ";";
                            if (BlobLabel)
                            {
                                Cv2.PutText(src, geometry, blobDetection[processKey].Centroid, HersheyFonts.HersheySimplex, 0.5, new Scalar(0, 255, 0), 2);
                                Cv2.PutText(src, "[" + scal[0] + ", " + ((scal[1] + scal[2]) / 2) + ", " + ((scal[3] + scal[4]) / 2) + "]", new Point(blobDetection[processKey].Centroid.X, blobDetection[processKey].Centroid.Y + 20), HersheyFonts.HersheySimplex, 0.45, new Scalar(0, 255, 0), 2);
                            }
                            udpDatagram_6  = "[$]tracking|id=" + data_id + "|label=" + blobCount + "|[$$]" + deviceName + ",[$$$]perimeter,value,";
                            udpDatagram_6 += blobDetection[processKey].Contour.Perimeter().ToString().Replace(',', '.') + ";";

                            // UDP sender---------------------------------------------------------------------
                            try
                            {
                                byte[] sendBytes_1 = Encoding.ASCII.GetBytes(udpDatagram_1);
                                byte[] sendBytes_2 = Encoding.ASCII.GetBytes(udpDatagram_2);
                                byte[] sendBytes_3 = Encoding.ASCII.GetBytes(udpDatagram_3);
                                byte[] sendBytes_4 = Encoding.ASCII.GetBytes(udpDatagram_4);
                                byte[] sendBytes_5 = Encoding.ASCII.GetBytes(udpDatagram_5);
                                byte[] sendBytes_6 = Encoding.ASCII.GetBytes(udpDatagram_6);
                                udpClient.Send(sendBytes_1, sendBytes_1.Length, IP_udp, Port_udp);
                                udpClient.Send(sendBytes_2, sendBytes_2.Length, IP_udp, Port_udp);
                                udpClient.Send(sendBytes_3, sendBytes_3.Length, IP_udp, Port_udp);
                                udpClient.Send(sendBytes_4, sendBytes_4.Length, IP_udp, Port_udp);
                                udpClient.Send(sendBytes_5, sendBytes_5.Length, IP_udp, Port_udp);
                                udpClient.Send(sendBytes_6, sendBytes_6.Length, IP_udp, Port_udp);
                            }
                            catch (Exception e)
                            {
                                Console.WriteLine(e.ToString());
                            }
                            udpDatagram_1 = "";
                            udpDatagram_2 = "";
                            udpDatagram_3 = "";
                            udpDatagram_4 = "";
                            blobCount++;
                        }
                    }
                }
                blobCount = 1;
            }

            // Same morphologic transformation but this time for the output image.
            Cv2.MorphologyEx(src: dstPreview,
                             dst: dstPreview,
                             op: MorphTypes.Close,
                             element: element
                             );

            return(dstPreview);
        }
예제 #19
0
파일: Program.cs 프로젝트: 0sv/opencvsharp
        /// <summary>
        /// Tracks blobs across the frames of a video: each frame is converted to
        /// grayscale, Otsu-thresholded, labelled, reduced to the 200 largest
        /// blobs, and fed into <see cref="CvTracks"/> for frame-to-frame tracking.
        /// Both the labelled blobs and the tracks are rendered to windows.
        /// Runs until the video ends or the user presses any key.
        /// </summary>
        private static void Track()
        {
            using (var video = new CvCapture("data/bach.mp4"))
            using (var window = new CvWindow("render"))
            using (var windowTracks = new CvWindow("tracks"))
            {
                IplImage gray = null;
                IplImage binary = null;
                IplImage render = null;
                IplImage renderTracks = null;
                CvTracks tracks = new CvTracks();

                for (int i = 0; ; i++)
                {
                    IplImage frame = video.QueryFrame();
                    // BUGFIX: QueryFrame returns null at end of stream; the
                    // original dereferenced it unconditionally (NullReferenceException).
                    if (frame == null)
                        break;

                    // Lazily allocate the working images once the frame size is known.
                    if (gray == null)
                    {
                        gray = new IplImage(frame.Size, BitDepth.U8, 1);
                        binary = new IplImage(frame.Size, BitDepth.U8, 1);
                        render = new IplImage(frame.Size, BitDepth.U8, 3);
                        renderTracks = new IplImage(frame.Size, BitDepth.U8, 3);
                    }

                    render.Zero();
                    renderTracks.Zero();

                    Cv.CvtColor(frame, gray, ColorConversion.BgrToGray);
                    Cv.Threshold(gray, binary, 0, 255, ThresholdType.Otsu);

                    // Label connected components, then keep only the 200 largest
                    // blobs (by area) for tracking.
                    CvBlobs blobs = new CvBlobs(binary);
                    CvBlobs newBlobs = new CvBlobs(blobs
                        .OrderByDescending(pair => pair.Value.Area)
                        .Take(200)
                        .ToDictionary(pair => pair.Key, pair => pair.Value), blobs.Labels);
                    newBlobs.RenderBlobs(binary, render);
                    window.ShowImage(render);

                    newBlobs.UpdateTracks(tracks, 10.0, Int32.MaxValue);
                    tracks.Render(binary, renderTracks);
                    windowTracks.ShowImage(renderTracks);

                    // BUGFIX: the original discarded WaitKey's result, leaving no
                    // way to exit; WaitKey returns -1 on timeout, >= 0 on a key press.
                    if (Cv.WaitKey(200) >= 0)
                        break;
                    Console.WriteLine(i);
                }
            }
        }
예제 #20
0
        /// <summary>
        /// Pre-processes <paramref name="img"/> for blob extraction: converts it
        /// to grayscale, masks out the top/bottom/right regions, applies a
        /// top-hat morphology followed by an Otsu threshold and a median smooth,
        /// labels the remaining blobs, filters them by area (850–4850 px), and
        /// renders their bounding boxes and angles back onto <paramref name="img"/>.
        /// </summary>
        /// <param name="img">Source RGBA image; drawn on but not released here.</param>
        /// <returns>The labelled, area-filtered blobs.</returns>
        private CvBlobs PreProcessImage2_old(IplImage img)
        {
            CvBlobs       blobs     = null;
            IplConvKernel element   = null;
            IplImage      temp      = null;
            IplImage      dest      = null;
            IplImage      tmpImage  = null;
            IplImage      tmpImage2 = null;
            IplImage      labelImg  = null;

            try
            {
                // Wide, flat structuring element (180x5, anchored at 90,1) tuned
                // to pick out horizontal features with the top-hat operation.
                element  = Cv.CreateStructuringElementEx(180, 5, 90, 1, ElementShape.Rect, null);
                tmpImage = new IplImage(img.Size, BitDepth.U8, 1);
                temp     = tmpImage.Clone();
                dest     = tmpImage.Clone();
                img.CvtColor(tmpImage, ColorConversion.RgbaToGray);
                // Blank out (fill white) the top 3/9, bottom 1/5 and right 2/9 of
                // the image so only the band of interest survives thresholding.
                tmpImage.Rectangle(new CvPoint(0, 0), new CvPoint((Int32)(tmpImage.Size.Width), (Int32)((tmpImage.Size.Height / 9) * 3)), new CvScalar(255, 255, 255), -1);
                tmpImage.Rectangle(new CvPoint(0, (Int32)((tmpImage.Size.Height / 5) * 4)), new CvPoint((Int32)(tmpImage.Size.Width), (Int32)(tmpImage.Size.Height)), new CvScalar(255, 255, 255), -1);
                tmpImage.Rectangle(new CvPoint((Int32)((tmpImage.Size.Width / 9) * 7), 0), new CvPoint((Int32)((tmpImage.Size.Width)), (Int32)(tmpImage.Size.Height)), new CvScalar(255, 255, 255), -1);
                Cv.Smooth(tmpImage, tmpImage, SmoothType.Gaussian);
                Cv.MorphologyEx(tmpImage, dest, temp, element, MorphologyOperation.TopHat, 1);
                Cv.Threshold(dest, tmpImage, 128, 255, ThresholdType.Binary | ThresholdType.Otsu);
                Cv.Smooth(tmpImage, dest, SmoothType.Median);

                labelImg  = new IplImage(img.Size, CvBlobLib.DepthLabel, 1);
                blobs     = new CvBlobs();
                tmpImage2 = tmpImage.Clone();
                CvBlobLib.Label(tmpImage2, labelImg, blobs);

                CvBlobLib.FilterByArea(blobs, 850, 4850);
                Cv.ReleaseImage(tmpImage);
                tmpImage = img.Clone();
                blobs.RenderBlobs(labelImg, img, tmpImage, RenderBlobsMode.BoundingBox | RenderBlobsMode.Angle);
            }
            finally
            {
                // BUGFIX: the structuring element was previously never released,
                // leaking the native kernel on every call.
                if (null != element)
                {
                    element.Dispose();
                }

                if (null != temp)
                {
                    Cv.ReleaseImage(temp);
                }

                if (null != dest)
                {
                    Cv.ReleaseImage(dest);
                }

                if (null != tmpImage)
                {
                    Cv.ReleaseImage(tmpImage);
                }

                if (null != tmpImage2)
                {
                    Cv.ReleaseImage(tmpImage2);
                }

                if (null != labelImg)
                {
                    Cv.ReleaseImage(labelImg);
                }
            }


            return(blobs);
        }
예제 #21
0
    // Unity initialization hook: opens camera 0 and runs a blocking capture
    // loop that binarizes each frame, labels the blobs, renders the labelling
    // result, and logs the largest blob's rect/centroid/area.
    void Start()
    {
        using (var video = new VideoCapture(0))
        {
            // Destination matrices, reused across frames.
            var frame = new Mat();
            var gray  = new Mat();
            // Loop until any key is pressed (WaitKey returns -1 on timeout).
            while (Cv2.WaitKey(1) == -1)
            {
                // Grab a frame; stop if the camera delivers nothing
                // (the original crashed in CvtColor on an empty frame).
                video.Read(frame);
                if (frame.Empty())
                {
                    break;
                }
                // Convert to grayscale.
                Cv2.CvtColor(frame, gray, ColorConversionCodes.BGR2GRAY);
                // Binarize; Otsu chooses the threshold (the 100 is ignored).
                using (var binary = gray.Threshold(100, 255, ThresholdTypes.Otsu | ThresholdTypes.Binary))
                {
                    // Run connected-component labelling.
                    CvBlobs blobs = new CvBlobs(binary);

                    // Render target with the same size/channels as the input.
                    using (var render = new Mat(frame.Rows, frame.Cols, MatType.CV_8UC3, 3))
                    {
                        // Draw the labelling result.
                        blobs.RenderBlobs(frame, render);

                        // BUGFIX: LargestBlob() returns null when no blob was
                        // found; the original dereferenced it unconditionally.
                        CvBlob maxBlob = blobs.LargestBlob();
                        if (maxBlob != null)
                        {
                            Debug.Log(maxBlob.Rect);
                            Debug.Log(maxBlob.Centroid);
                            Debug.Log(maxBlob.Area);
                        }

                        // Show the rendered labelling. The window is created and
                        // disposed each frame, as in the original sample (which
                        // had a stray ';' producing an empty using body).
                        using (new Window("render", render))
                        {
                        }
                    }
                }
            }
        }
    }
예제 #22
0
        /// <summary>
        /// Finds the fingertip position on the graphics: captures camera frames,
        /// isolates the target colour via <see cref="ColorRecognition"/>, labels
        /// the resulting blobs, and takes the centroid of the largest blob as
        /// the fingertip. Loops until ESC is pressed or the camera stops
        /// delivering frames.
        /// </summary>
        /// <param name="targetName">Unused; kept for caller compatibility.</param>
        /// <returns>The last detected fingertip point, offset by <c>movement</c>.</returns>
        public System.Drawing.Point FingerFinder(string targetName = "")
        {
            //To find the finger point which on the Graphics
            // Camera state.
            capFlag = true;
            Mat              tempImg, flipImg;
            Mat              grayImg, renderImg;
            Mat              srcImgbyCam = new Mat();
            double           centerX = 0.0, centerY = 0.0;
            ColorRecognition iRo = new ColorRecognition();

            var capture = new VideoCapture(CaptureDevice.Any)
            {
                // Capture frame size.
                FrameHeight = 480,
                FrameWidth  = 320,
                //FrameHeight = 640, FrameWidth = 480,
            };

            using (capture)
            {
                while (capFlag)
                {
                    // Grab a frame from the camera.
                    capFlag = capture.Read(srcImgbyCam);

                    if (srcImgbyCam.Empty())
                    {
                        break;
                    }

                    //Camera Test window
                    Cv2.ImShow("srcImgbyCam", srcImgbyCam);
                    // Flip both axes (Flip returns a new Mat; the original
                    // cloned first and leaked the intermediate clone).
                    flipImg = srcImgbyCam.Flip(FlipMode.XY);

                    tempImg = Mat.Zeros(srcImgbyCam.Size(), srcImgbyCam.Type());
                    grayImg = new Mat(srcImgbyCam.Size(), MatType.CV_8UC1);
                    // Finger detection: isolate the target colour into tempImg.
                    iRo.FindColor(ref flipImg, ref tempImg);

                    Cv2.CvtColor(tempImg, grayImg, ColorConversionCodes.BGR2GRAY);
                    Cv2.Threshold(grayImg, grayImg, 100, 255, ThresholdTypes.Binary);
                    // Connected-component labelling.
                    CvBlobs blobs = new CvBlobs(grayImg);
                    renderImg = new Mat(srcImgbyCam.Size(), MatType.CV_8UC3);
                    // Render the labelling result.
                    blobs.RenderBlobs(srcImgbyCam, renderImg);
                    // Largest blob is the finger candidate (null if none matched).
                    CvBlob maxblob = blobs.LargestBlob();

                    if (maxblob != null)
                    {
                        centerX = Math.Round(maxblob.Centroid.X, 2);
                        centerY = Math.Round(maxblob.Centroid.Y, 2);

                        // Manual calibration, pass 1.
                        centerX = (int)((centerX - 12) / 12.87);
                        centerY = (int)((centerY - 43) / 12.40);

                        // Manual calibration, pass 2.
                        centerX = (int)((centerX - 2) * 2);
                        centerY = (int)((centerY - 1) * 2);

                        //For Debug
                        textBox_CenterCoordinates.Text = centerX.ToString() + " , " + centerY.ToString();
                    }

                    int keyValue = Cv2.WaitKey(100);
                    if (keyValue == 27)
                    {
                        Window.DestroyAllWindows();
                        // Release the working images before leaving.
                        tempImg.Release(); flipImg.Release();
                        grayImg.Release(); renderImg.Release();
                        srcImgbyCam.Release();
                        capFlag = false;
                        break;   // Close on ESC.
                    }

                    // BUGFIX: release the per-frame Mats on every iteration;
                    // the original only released them on the ESC path, leaking
                    // native memory for each processed frame.
                    tempImg.Release(); flipImg.Release();
                    grayImg.Release(); renderImg.Release();
                }
            }
            return(new System.Drawing.Point(Convert.ToInt32(centerX + movement.X), Convert.ToInt32(centerY + movement.Y)));
        }
예제 #23
0
        /// <summary>
        /// Starts the camera capture loop on a background task. Each frame is
        /// undistorted using the intrinsic/distortion calibration files, rotated
        /// by <c>Theta</c>, post-processed according to the Flag* switches
        /// (labeling / binary / grid / frame / NG-frame / HSV), and pushed to
        /// <c>source</c> via the dispatcher for WPF display. The loop runs until
        /// <c>canExecute</c> is cleared (Stop) or a camera error occurs.
        /// </summary>
        public void Start()
        {
            if (canExecute)
            {
                return;            // Already running: do nothing. Guards against a forgotten Stop — calling Start twice would run two capture loops and misbehave.
            }
            IsActive   = true;
            canExecute = true;
            var             im = new IplImage(); // Holds the latest raw camera frame.
            WriteableBitmap buff = new WriteableBitmap(WIDTH, HEIGHT, 96, 96, PixelFormats.Bgr24, null);
            WriteableBitmap grayBuff = new WriteableBitmap(WIDTH, HEIGHT, 96, 96, PixelFormats.Gray8, null);
            IplImage        _mapX, _mapY;
            var             dst = new IplImage();


            Task.Run(() =>
            {
                //Thread.Sleep(1000);

                try
                {
                    cap = Cv.CreateCameraCapture(CameraNumber); // Open the camera device.

                    cap.SetCaptureProperty(CaptureProperty.FrameWidth, WIDTH);
                    cap.SetCaptureProperty(CaptureProperty.FrameHeight, HEIGHT);

                    SetWb();

                    var dis = App.Current.Dispatcher;

                    while (canExecute)             // Capture/display loop; exits when canExecute is cleared.
                    {
                        try
                        {
                            Thread.Sleep(100);
                            if (FlagPropChange)
                            {
                                // Push the updated UI-side camera properties to the device.
                                cap.SetCaptureProperty(CaptureProperty.FrameWidth, WIDTH);
                                cap.SetCaptureProperty(CaptureProperty.FrameHeight, HEIGHT);
                                cap.SetCaptureProperty(CaptureProperty.Brightness, Brightness);
                                cap.SetCaptureProperty(CaptureProperty.Contrast, Contrast);
                                cap.SetCaptureProperty(CaptureProperty.Hue, Hue);
                                cap.SetCaptureProperty(CaptureProperty.Saturation, Saturation);
                                cap.SetCaptureProperty(CaptureProperty.Sharpness, Sharpness);
                                cap.SetCaptureProperty(CaptureProperty.Gamma, Gamma);
                                cap.SetCaptureProperty(CaptureProperty.Gain, Gain);
                                cap.SetCaptureProperty(CaptureProperty.Exposure, Exposure);// exposure
                                //cap.SetCaptureProperty(CaptureProperty.WhiteBalance, White);// not supported in OpenCvSharp 2/3

                                // Clear the change flag on the UI thread.
                                dis.BeginInvoke(new Action(() =>
                                {
                                    try
                                    {
                                        FlagPropChange = false;
                                    }
                                    catch
                                    {
                                        MessageBox.Show("カメラ異常");
                                        canExecute = false;
                                    }
                                }));
                            }

                            im = Cv.QueryFrame(cap);// Grab a frame.
                            if (im == null)
                            {
                                continue;
                            }
                            if (IsActive == true)
                            {
                                IsActive = false;
                            }

                            dst = new IplImage(im.Size, im.Depth, im.NChannels);

                            //set rectify data
                            // NOTE(review): _mapX/_mapY are re-created every frame and
                            // never released — candidate for hoisting out of the loop.
                            _mapX = Cv.CreateImage(im.Size, BitDepth.F32, 1);
                            _mapY = Cv.CreateImage(im.Size, BitDepth.F32, 1);
                            Cv.InitUndistortMap(_fileIntrinsic, _fileDistortion, _mapX, _mapY);
                            Cv.Remap(im, dst, _mapX, _mapY);


                            // Tilt correction: rotate the rectified frame around its centre by Theta.
                            CvPoint2D32f center = new CvPoint2D32f(WIDTH / 2, HEIGHT / 2);
                            CvMat affineMatrix  = Cv.GetRotationMatrix2D(center, Theta, 1.0);
                            //Cv.WarpAffine(im, im, affineMatrix);
                            Cv.WarpAffine(dst, dst, affineMatrix);

                            if (FlagTestPic)
                            {
                                // Snapshot one corrected frame for later inspection.
                                imageForTest = dst.Clone();
                                FlagTestPic  = false;
                            }

                            if (FlagLabeling)
                            {
                                // Labeling view: binarize, label the blobs, render them.
                                var imageForLabeling = new IplImage(WIDTH, HEIGHT, BitDepth.U8, 3);
                                var imbuff           = dst.Clone();
                                var Binbuff          = Binary(imbuff);
                                blobs = new CvBlobs(Binbuff);

                                blobs.RenderBlobs(dst, imageForLabeling);
                                dis.BeginInvoke(new Action(() =>
                                {
                                    WriteableBitmapConverter.ToWriteableBitmap(imageForLabeling, buff);// Convert the frame for WPF display.
                                    source = buff;
                                    imageForLabeling.Dispose();
                                }));

                                // Busy-wait until the NG frame is acknowledged.
                                // NOTE(review): this spins a CPU core while waiting.
                                while (FlagNgFrame)
                                {
                                    ;
                                }

                                continue;
                            }



                            // Binarized view.
                            if (FlagBin)
                            {
                                var imbuff  = dst.Clone();
                                var Binbuff = Binary(imbuff);
                                dis.BeginInvoke(new Action(() =>
                                {
                                    WriteableBitmapConverter.ToWriteableBitmap(Binbuff, grayBuff);// Convert the frame for WPF display.
                                    source = grayBuff;
                                }));
                                continue;
                            }



                            // Grid overlay view: draw a 10-pixel grid over the frame.
                            if (FlagGrid)
                            {
                                foreach (var i in Enumerable.Range(0, HEIGHT / 10))
                                {
                                    var 行  = i * 10;   // row (y) coordinate
                                    var p1 = new CvPoint(0, 行);
                                    var p2 = new CvPoint(WIDTH, 行);
                                    dst.Line(p1, p2, CvColor.Aquamarine, 1, LineType.AntiAlias, 0);
                                }
                                foreach (var j in Enumerable.Range(0, WIDTH / 10))
                                {
                                    var 列  = j * 10;   // column (x) coordinate
                                    var p1 = new CvPoint(列, 0);
                                    var p2 = new CvPoint(列, HEIGHT);
                                    dst.Line(p1, p2, CvColor.Aquamarine, 1, LineType.AntiAlias, 0);
                                }
                                dis.BeginInvoke(new Action(() =>
                                {
                                    WriteableBitmapConverter.ToWriteableBitmap(dst, buff);// Convert the frame for WPF display.
                                    source = buff;
                                }));
                                continue;
                            }



                            if (FlagFrame)
                            {
                                dis.BeginInvoke(new Action(() =>
                                {
                                    MakeFrame(dst);
                                    WriteableBitmapConverter.ToWriteableBitmap(dst, buff);// Convert the frame for WPF display.
                                    source = buff;
                                }));
                                continue;
                            }

                            if (FlagNgFrame)// When the test judged the frame NG.
                            {
                                dis.BeginInvoke(new Action(() =>
                                {
                                    MakeNgFrame(imageForTest);
                                    WriteableBitmapConverter.ToWriteableBitmap(imageForTest, source);// Convert the frame for WPF display.
                                }));

                                // Busy-wait until the NG state is cleared elsewhere.
                                while (FlagNgFrame)
                                {
                                    ;
                                }
                            }

                            if (FlagHsv)
                            {
                                GetHsv(dst);
                            }

                            // If every flag is false, show the normal (rectified, rotated) frame.
                            dis.BeginInvoke(new Action(() =>
                            {
                                try
                                {
                                    WriteableBitmapConverter.ToWriteableBitmap(dst, buff);// Convert the frame for WPF display.
                                    source = buff;
                                }
                                catch
                                {
                                    CamState   = false;
                                    canExecute = false;
                                }
                            }));
                        }
                        catch
                        {
                            // Swallow per-frame exceptions; mark the camera as
                            // failed and stop the loop instead of crashing.
                            CamState   = false;
                            canExecute = false;
                        }
                    }
                }
                catch
                {
                    CamState = false;
                }
                finally
                {
                    // Always release the capture device and publish the stopped state.
                    if (cap != null)
                    {
                        cap.Dispose();
                        cap = null;
                    }
                    IsActive = false;
                    Stopped  = true;
                }
            });
        }
예제 #24
0
        // Extract blobs from the grayscale image and return the centre
        // coordinate of the largest one (the LED). Also stores the rendered
        // labelling result in this.CalibrationImage.
        Point GetCenterPointofLED(Mat grayImage)
        {
            OpenCvSharp.CPlusPlus.Point centerPoint = new OpenCvSharp.CPlusPlus.Point();
            IplImage grayIpl = grayImage.ToIplImage().Clone();
            IplImage calibIpl = new IplImage(grayIpl.Size, BitDepth.U8, 3);
            // Centre detection: label the blobs and take the largest.
            CvBlobs blobs = new CvBlobs();
            blobs.Label(grayIpl);
            //blobs.FilterByArea(20, 1500);
            CvBlob blob = blobs.LargestBlob();

            try
            {
                if (blob != null)
                {
                    centerPoint = new Point(blob.Centroid.X, blob.Centroid.Y);

                    blobs.RenderBlobs(grayIpl, calibIpl);
                }
            }
            catch
            {
                // BUGFIX: message previously read "eroor:counter".
                Console.WriteLine("error:contour");
            }

            this.CalibrationImage = new Mat(calibIpl);
            // Release the temporary grayscale clone; calibIpl backs
            // CalibrationImage and must stay alive.
            grayIpl.Dispose();
            Console.WriteLine(centerPoint);
            return centerPoint;
        }
예제 #25
0
        /// <summary>
        /// cvBlob demo: binarizes a shapes image, labels the blobs, and renders
        /// each blob's contour and convex hull plus the overall labelling into
        /// separate windows. Waits for a key press before closing.
        /// </summary>
        private static void Main(string[] args)
        {
            using (IplImage imgSrc = new IplImage(@"img\shapes.png", LoadMode.Color))
            using (IplImage imgBinary = new IplImage(imgSrc.Size, BitDepth.U8, 1))
            using (IplImage imgRender = new IplImage(imgSrc.Size, BitDepth.U8, 3))
            using (IplImage imgContour = new IplImage(imgSrc.Size, BitDepth.U8, 3))
            using (IplImage imgPolygon = new IplImage(imgSrc.Size, BitDepth.U8, 3))
            {
                Cv.CvtColor(imgSrc, imgBinary, ColorConversion.BgrToGray);
                Cv.Threshold(imgBinary, imgBinary, 100, 255, ThresholdType.Binary);

                CvBlobs blobs = new CvBlobs();
                blobs.Label(imgBinary);

                foreach (KeyValuePair<int, CvBlob> item in blobs)
                {
                    CvBlob b = item.Value;
                    Console.WriteLine("{0} | Centroid:{1} Area:{2}", item.Key, b.Centroid, b.Area);

                    // Render the blob's contour chain code.
                    // (The original also computed the perimeter and looped over
                    // the polygon vertices, but both results were unused.)
                    CvContourChainCode cc = b.Contour;
                    cc.Render(imgContour);

                    // Convert the chain code to a polygon and draw its convex hull.
                    CvContourPolygon polygon = cc.ConvertToPolygon();
                    CvContourPolygon convexHull = polygon.ContourConvexHull();
                    convexHull.Render(imgPolygon);
                }

                blobs.RenderBlobs(imgSrc, imgRender);

                using (new CvWindow("src", imgSrc))
                using (new CvWindow("binary", imgBinary))
                using (new CvWindow("render", imgRender))
                using (new CvWindow("contour", imgContour))
                using (new CvWindow("polygon vertices", imgPolygon))
                {
                    Cv.WaitKey(0);
                }
            }
        }