// Coordinate calculation
        void SendLEDPtData(ref UDP_PACKETS_CODER.UDP_PACKETS_ENCODER enc)
        {
            CameraSpacePoint LEDPt = this.calib.GetLEDPositinon(this.imageWidth,
                                                                this.imageHeigt,
                                                                this.colors,
                                                                new OpenCvSharp.CPlusPlus.Scalar(30, 93, 93),
                                                                this.depthBuffer);

            this.calib.MakeSaveData(this.calibMode, LEDPt);
            enc += LEDPt.X;
            enc += LEDPt.Y;
            enc += LEDPt.Z;
            Console.WriteLine("X:" + LEDPt.X.ToString());
            Console.WriteLine("Y:" + LEDPt.Y.ToString());
            Console.WriteLine("Z:" + LEDPt.Z.ToString());

            // Display the calibration image
            OpenCvSharp.CPlusPlus.Mat calibMat = calib.GetCalibrationImage().Clone();
            this.calibrationImage.Source          = OpenCvSharp.Extensions.WriteableBitmapConverter.ToWriteableBitmap(calibMat);
            this.CalibrationImageTable.IsSelected = true;

            // Display the data being sent
            this.message.Content = "LEDPosition  " + "X:" + LEDPt.X.ToString() + " Y:" + LEDPt.Y.ToString() + " Z:" + LEDPt.Z.ToString();

            // Switch the data transmission mode
            this.SendData_LEDPt.IsChecked = false;
            this.SendData_Body.IsChecked  = true;
        }
Example #2
        public void GetHsv(IplImage src)
        {
            IplImage hsv = new IplImage(WIDTH, HEIGHT, BitDepth.U8, 3);

            // Convert from BGR to HSV
            Cv.CvtColor(src, hsv, ColorConversion.BgrToHsv);

            OpenCvSharp.CPlusPlus.Mat mat = new OpenCvSharp.CPlusPlus.Mat(hsv, true);
            int matw = mat.Width;
            int math = mat.Height;

            var re = mat.At <OpenCvSharp.CPlusPlus.Vec3b>(PointY, PointX);

            Hdata = re[0];
            Sdata = re[1];
            Vdata = re[2];
        }
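These examples read individual pixels with Mat.At<Vec3b>, which takes the row (Y) index first and the column (X) index second, which is why the call above passes (PointY, PointX). A minimal, self-contained sketch of that access pattern, assuming the OpenCvSharp 2.x (CPlusPlus) API and using a placeholder file name and coordinates:

        // Minimal sketch (file name and coordinates are placeholders).
        static void ProbeHsvPixel()
        {
            using (OpenCvSharp.CPlusPlus.Mat bgr = OpenCvSharp.CPlusPlus.Cv2.ImRead("sample.png"))
            using (OpenCvSharp.CPlusPlus.Mat hsv = new OpenCvSharp.CPlusPlus.Mat())
            {
                // OpenCV loads images as BGR, so convert BGR -> HSV
                OpenCvSharp.CPlusPlus.Cv2.CvtColor(bgr, hsv, OpenCvSharp.ColorConversion.BgrToHsv);

                // Row (Y) index first, column (X) index second
                var px = hsv.At<OpenCvSharp.CPlusPlus.Vec3b>(120, 200);
                Console.WriteLine("H:" + px[0] + " S:" + px[1] + " V:" + px[2]);
            }
        }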
Example #3
        private void CheckColor()
        {
            IplImage source = new IplImage(640, 360, BitDepth.U8, 3);

            const int TEST_FRAME = 36;

            var ListH = new List <int>();

            int side = (int)Math.Sqrt(TEST_FRAME); // length of one side of the inspection frame

            try
            {
                Task.Run(() =>
                {
                    while (FlagCheckCol)
                    {
                        try
                        {
                            // Acquire an image
                            if (!General.camLed.GetPic())
                            {
                                continue;
                            }

                            source = General.camLed.imageForTest;

                            using (var hsv = new IplImage(640, 360, BitDepth.U8, 3)) // buffer for the HSV image
                            {
                                // Convert from BGR to HSV
                                Cv.CvtColor(source, hsv, ColorConversion.BgrToHsv);

                                OpenCvSharp.CPlusPlus.Mat mat = new OpenCvSharp.CPlusPlus.Mat(hsv, true);

                                TestpointForGetLum = GetTestPoint();
                                TestpointForGetLum.ForEach(l =>
                                {
                                    ListH.Clear();
                                    foreach (var i in Enumerable.Range(0, side))
                                    {
                                        foreach (var j in Enumerable.Range(0, side))
                                        {
                                            var re = mat.At <OpenCvSharp.CPlusPlus.Vec3b>(l.Item1.Y - (side / 2) + i, l.Item1.X - (side / 2) + j);
                                            if (re[0] != 0)
                                            {
                                                ListH.Add(re[0]);
                                            }
                                        }
                                    }

                                    string Hue = (ListH.Count != 0) ? ListH.Average().ToString("F0") : "0";

                                    switch (l.Item2)
                                    {
                                    case SEG_NAME.LED1:
                                        State.VmLedPoint.LED1Hue = Hue;
                                        break;

                                    case SEG_NAME.LED2:
                                        State.VmLedPoint.LED2Hue = Hue;
                                        break;

                                    case SEG_NAME.LED3:
                                        State.VmLedPoint.LED3Hue = Hue;
                                        break;

                                    case SEG_NAME.LED4:
                                        State.VmLedPoint.LED4Hue = Hue;
                                        break;
                                    }
                                });
                            }
                        }
                        catch
                        {
                            FlagCheckCol = false;
                        }
                    }
                });
            }
            finally
            {
                source.Dispose();
            }
        }
Example #4
        public static void CheckLed() // the argument specifies which LED to turn on
        {
            InitList();
            State.SetCamProp();
            General.cam.ResetFlag();               // Reset the camera flags; if they are not reinitialized, a retry fails.
                                                   // e.g. on an NG retry, General.cam.FlagFrame is still true and we stay in the infinite loop that displays NG frames.

            int side = (int)Math.Sqrt(TEST_FRAME); // length of one side of the inspection frame

            try
            {
                // Acquire an image from cam0
                General.cam.FlagTestPic = true;
                while (General.cam.FlagTestPic)
                {
                }

                source = General.cam.imageForTest;

                using (IplImage gray = Cv.CreateImage(new CvSize(WIDTH, HEIGHT), BitDepth.U8, 1)) // buffer for the grayscale image
                {
                    Cv.CvtColor(source, gray, ColorConversion.BgrToGray);                         // Convert to grayscale
                    Cv.Threshold(gray, gray, State.camProp.BinLevel, 255, ThresholdType.Binary);  // Binarize the grayscale image
                                                                                                  // Noise removal
                    if (State.camProp.Opening)
                    {
                        Cv.Erode(gray, gray, null, State.camProp.CloseCnt); // erosion
                        Cv.Dilate(gray, gray, null, State.camProp.OpenCnt); // dilation
                    }
                    else
                    {
                        Cv.Dilate(gray, gray, null, State.camProp.OpenCnt); // dilation
                        Cv.Erode(gray, gray, null, State.camProp.CloseCnt); // erosion
                    }


                    // Debug code (uncomment the line below to save the image)
                    //gray.SaveImage(@"C:\OS303\BinPic.bmp");

                    var mat = new OpenCvSharp.CPlusPlus.Mat(gray, true);

                    ListLedSpec.ForEach(l =>
                    {
                        int onCount = 0;
                        foreach (var i in Enumerable.Range(0, side))
                        {
                            foreach (var j in Enumerable.Range(0, side))
                            {
                                var re = mat.At <OpenCvSharp.CPlusPlus.Vec3b>((int)l.y - (side / 2) + i, (int)l.x - (side / 2) + j);
                                // Check whether the LED is lit
                                if (re[0] == 255) // HSV = 0,0,255 => white
                                {
                                    onCount++;
                                }
                            }
                        }

                        l.OnCount = onCount;

                        // Update the view model
                        switch (l.name)
                        {
                        case NAME.LED1:
                            State.VmTestResults.LED1Value = l.OnCount.ToString("F0");
                            break;

                        case NAME.LED2:
                            State.VmTestResults.LED2Value = l.OnCount.ToString("F0");
                            break;

                        case NAME.LED3:
                            State.VmTestResults.LED3Value = l.OnCount.ToString("F0");
                            break;
                        }
                    });
                }
            }
            catch
            {
            }
            finally
            {
                General.cam.ResetFlag(); // Reset the camera flags; if they are not reinitialized, a retry fails.
            }
        }
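The noise-removal step above chains Cv.Erode and Cv.Dilate; only the order differs between the two branches. A minimal sketch of the two standard orderings, assuming the same OpenCvSharp C-style API (the helper name and iteration count are illustrative):

        // Opening (erode then dilate) removes small bright specks from a binary image;
        // closing (dilate then erode) fills small dark holes instead.
        static void RemoveNoise(IplImage binary, bool opening, int iterations)
        {
            if (opening)
            {
                Cv.Erode(binary, binary, null, iterations);  // shrink bright regions
                Cv.Dilate(binary, binary, null, iterations); // restore their size
            }
            else
            {
                Cv.Dilate(binary, binary, null, iterations); // grow bright regions
                Cv.Erode(binary, binary, null, iterations);  // restore their size
            }
        }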
Example #5
        public static bool CheckColor(NAME name)
        {
            State.SetCamProp();
            General.cam.ResetFlag(); // Reset the camera flags; if they are not reinitialized, a retry fails.
            // e.g. on an NG retry, General.cam.FlagFrame is still true and we stay in the infinite loop that displays NG frames.

            int X      = 0;
            int Y      = 0;
            int HueMax = 0;
            int HueMin = 0;



            var ListH = new List <int>();
            //var ListS = new List<int>();
            //var ListV = new List<int>();

            int side = (int)Math.Sqrt(TEST_FRAME); // length of one side of the inspection frame

            try
            {
                switch (name)
                {
                case NAME.LED1:
                    X      = Int32.Parse(State.camProp.LED1.Split('/').ToArray()[0]);
                    Y      = Int32.Parse(State.camProp.LED1.Split('/').ToArray()[1]);
                    HueMax = State.TestSpec.OrangeHueMax;
                    HueMin = State.TestSpec.OrangeHueMin;
                    break;

                case NAME.LED2:
                    X      = Int32.Parse(State.camProp.LED2.Split('/').ToArray()[0]);
                    Y      = Int32.Parse(State.camProp.LED2.Split('/').ToArray()[1]);
                    HueMax = State.TestSpec.GreenHueMax;
                    HueMin = State.TestSpec.GreenHueMin;
                    break;

                case NAME.LED3:
                    X      = Int32.Parse(State.camProp.LED3.Split('/').ToArray()[0]);
                    Y      = Int32.Parse(State.camProp.LED3.Split('/').ToArray()[1]);
                    HueMax = State.TestSpec.RedHueMax;
                    HueMin = State.TestSpec.RedHueMin;
                    break;
                }

                // Acquire an image from cam0
                General.cam.FlagTestPic = true;
                while (General.cam.FlagTestPic)
                {
                }

                source = General.cam.imageForTest;

                // Debug code (uncomment the line below to save the image)
                //source.SaveImage(@"C:\OS303\ColorPic.bmp");

                using (IplImage hsv = new IplImage(640, 360, BitDepth.U8, 3)) // buffer for the HSV image
                {
                    // Convert from BGR to HSV
                    Cv.CvtColor(source, hsv, ColorConversion.BgrToHsv);

                    OpenCvSharp.CPlusPlus.Mat mat = new OpenCvSharp.CPlusPlus.Mat(hsv, true);


                    foreach (var i in Enumerable.Range(0, side))
                    {
                        foreach (var j in Enumerable.Range(0, side))
                        {
                            var re = mat.At <OpenCvSharp.CPlusPlus.Vec3b>((int)Y - (side / 2) + i, (int)X - (side / 2) + j);
                            if (re[1] == 255 && re[2] == 255)
                            {
                                ListH.Add(re[0]);
                            }
                        }
                    }

                    switch (name)
                    {
                    case NAME.LED1:    // yellow
                        H_Ave = ListH.Min();
                        break;

                    case NAME.LED2:    // green
                        H_Ave = ListH.Max();
                        break;

                    case NAME.LED3:    // red
                        H_Ave = ListH.Min();
                        break;
                    }


                    return(H_Ave >= HueMin && H_Ave <= HueMax);
                }
            }
            finally
            {
                string hsvValue = H_Ave.ToString("F0");

                ColorHSV hsv   = new ColorHSV((float)Test_Led.H_Ave / 180, 1, 1);
                var      rgb   = ColorConv.HSV2RGB(hsv);
                var      color = new SolidColorBrush(Color.FromRgb(rgb.R, rgb.G, rgb.B));
                color.Freeze(); // Important: freeze the brush so it can be used across threads

                switch (name)
                {
                case NAME.LED1:
                    State.VmTestResults.HueLED1   = hsvValue;
                    State.VmTestResults.ColorLED1 = color;
                    break;

                case NAME.LED2:
                    State.VmTestResults.HueLED2   = hsvValue;
                    State.VmTestResults.ColorLED2 = color;
                    break;

                case NAME.LED3:
                    State.VmTestResults.HueLED3   = hsvValue;
                    State.VmTestResults.ColorLED3 = color;
                    break;
                }
            }
        }
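The division by 180 above reflects that OpenCV stores hue as 0-179 for 8-bit images (the angle in degrees halved). A hypothetical stand-in for the project's ColorHSV/ColorConv helpers, shown only to make that scaling explicit (full saturation and value assumed):

        // Hypothetical helper, not the project's ColorConv: map an OpenCV hue (0-179)
        // to an RGB color with S = V = 1.
        static System.Windows.Media.Color OpenCvHueToRgb(int openCvHue)
        {
            double h = openCvHue * 2.0;                        // back to 0..358 degrees
            double x = 1.0 - Math.Abs((h / 60.0) % 2.0 - 1.0); // secondary component
            double r = 0, g = 0, b = 0;
            if      (h <  60) { r = 1; g = x; }
            else if (h < 120) { r = x; g = 1; }
            else if (h < 180) { g = 1; b = x; }
            else if (h < 240) { g = x; b = 1; }
            else if (h < 300) { r = x; b = 1; }
            else              { r = 1; b = x; }
            return System.Windows.Media.Color.FromRgb((byte)(r * 255), (byte)(g * 255), (byte)(b * 255));
        }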
Example #6
        // Image segmentation with the watershed algorithm
        // Parameters: number of divisions (horizontal), number of divisions (vertical), marker size
        private OpenCvSharp.CPlusPlus.Mat procWatershed(OpenCvSharp.CPlusPlus.Mat matSrc, int wdiv, int hdiv, int msize)
        {
            // Prepare the Mat
            OpenCvSharp.CPlusPlus.Mat matDst = matSrc.Clone();

            // Prepare IplImages (for the C API)
            OpenCvSharp.IplImage iplSrc = matSrc.ToIplImage();
            OpenCvSharp.IplImage iplDst = iplSrc.Clone();

            // Prepare the marker image
            OpenCvSharp.IplImage iplMarker = new OpenCvSharp.IplImage(iplSrc.Size, OpenCvSharp.BitDepth.S32, 1);
            iplMarker.Zero();

            // Place the markers (evenly spaced)
            OpenCvSharp.CvPoint[,] mpt = new OpenCvSharp.CvPoint[wdiv, hdiv];
            for (int i = 0; i < wdiv; i++)
            {
                for (int j = 0; j < hdiv; j++)
                {
                    mpt[i, j] = new OpenCvSharp.CvPoint((int)(iplSrc.Width / wdiv * (i + 0.5)), (int)(iplSrc.Height / hdiv * (j + 0.5)));
                    iplMarker.Circle(mpt[i, j], msize, OpenCvSharp.CvScalar.ScalarAll(i * wdiv + j), OpenCvSharp.Cv.FILLED, OpenCvSharp.LineType.Link8, 0);
                }
            }

            // Run the segmentation
            OpenCvSharp.Cv.Watershed(iplSrc, iplMarker);

            // Draw the markers
            for (int i = 0; i < wdiv; i++)
            {
                for (int j = 0; j < hdiv; j++)
                {
                    iplDst.Circle(mpt[i, j], msize, OpenCvSharp.CvColor.White, 3, OpenCvSharp.LineType.Link8, 0);
                }
            }

            // Draw the region boundaries
            for (int i = 0; i < iplMarker.Height; i++)
            {
                for (int j = 0; j < iplMarker.Width; j++)
                {
                    int idx = (int)(iplMarker.Get2D(i, j).Val0);
                    if (idx == -1)
                    {
                        iplDst.Set2D(i, j, OpenCvSharp.CvColor.Red);
                    }
                }
            }

            // Convert the IplImage back to a Mat
            matDst = new OpenCvSharp.CPlusPlus.Mat(iplDst);

            return matDst;
        }
Example #7
        private void 読み込みToolStripMenuItem_Click(object sender, EventArgs e)
        {
            // Load the data
            OpenFileDialog dialog = new OpenFileDialog();
            if (dialog.ShowDialog() == DialogResult.OK)
            {
                this.matOrg = new OpenCvSharp.CPlusPlus.Mat(dialog.FileName);
                this.matL = this.matOrg.Clone();
                this.matR = this.matOrg.Clone();

                this.outputImageL = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(matL);
                this.outputImageR = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(matR);

                this.outputZoom = 1.0;
                this.outputShift = new Point(0, 0);
                this.isMouseDrag = false;
                this.mousePre = new Point(0, 0);

                this.comboBoxProcL.SelectedIndex = 0;
                this.comboBoxProcR.SelectedIndex = 0;

                this.pictureBoxL.Invalidate();
                this.pictureBoxR.Invalidate();
            }
        }
Example #8
        // Probabilistic Hough transform (Canny + Hough) plus statistics
        private OpenCvSharp.CPlusPlus.Mat procHoughStat(OpenCvSharp.CPlusPlus.Mat matSrc, int votes, double minLength, double maxGap)
        {
            OpenCvSharp.CPlusPlus.Mat matDst = matSrc.Clone();
            OpenCvSharp.CPlusPlus.Mat matGray = new OpenCvSharp.CPlusPlus.Mat(matSrc.Rows, matSrc.Cols, OpenCvSharp.CPlusPlus.MatType.CV_8UC1);
            OpenCvSharp.CPlusPlus.Mat matBinary = new OpenCvSharp.CPlusPlus.Mat(matSrc.Rows, matSrc.Cols, OpenCvSharp.CPlusPlus.MatType.CV_8UC1);

            OpenCvSharp.CPlusPlus.Cv2.CvtColor(matSrc, matGray, OpenCvSharp.ColorConversion.BgraToGray, 1);
            OpenCvSharp.CPlusPlus.Cv2.Threshold(matGray, matBinary, 0, 255, OpenCvSharp.ThresholdType.Binary | OpenCvSharp.ThresholdType.Otsu);

            // Hough transform
            double rho = 1.0;               // distance resolution
            double theta = Math.PI / 180.0; // angular resolution
            OpenCvSharp.CvLineSegmentPoint[] lines = OpenCvSharp.CPlusPlus.Cv2.HoughLinesP(matBinary, rho, theta, votes, minLength, maxGap);

            // Draw the detected line segments
            Random rnd = new Random();
            foreach (OpenCvSharp.CvLineSegmentPoint it in lines)
            {
                //matDst.Line(it.P1, it.P2, new OpenCvSharp.CPlusPlus.Scalar(0, 0, 255), 1, OpenCvSharp.LineType.AntiAlias, 0);
                matDst.Line(it.P1, it.P2, new OpenCvSharp.CPlusPlus.Scalar(rnd.Next(0, 255), rnd.Next(0, 255), rnd.Next(0, 255)), 1, OpenCvSharp.LineType.AntiAlias, 0);
            }

            // Compute and display the number of lines, average length, and maximum length per grid cell
            //int divNumR = 10;
            //int divNumC = 10;
            int divSizeR = matDst.Rows / divNumR;
            int divSizeC = matDst.Cols / divNumC;

            //double[,] sum = new double[divNumR, divNumC];
            //double[,] max = new double[divNumR, divNumC];
            //int[,] num = new int[divNumR, divNumC];

            foreach (OpenCvSharp.CvLineSegmentPoint it in lines)
            {
                double midR = (it.P1.Y + it.P2.Y) / 2;
                double midC = (it.P1.X + it.P2.X) / 2;
                double dist = it.P1.DistanceTo(it.P2);

                for (int r = 0; r < divNumR; r++)
                {
                    for (int c = 0; c < divNumC; c++)
                    {
                        if (midR >= divSizeR * r && midR < divSizeR * (r + 1) && midC >= divSizeC * c && midC < divSizeC * (c + 1))
                        {
                            sum[r, c] += dist;
                            num[r, c]++;
                            if (max[r, c] < dist)
                            {
                                max[r, c] = dist;
                            }
                        }
                    }
                }
            }

            for (int r = 0; r < divNumR; r++)
            {
                matDst.Line(new OpenCvSharp.CPlusPlus.Point(0, divSizeR * r), new OpenCvSharp.CPlusPlus.Point(matDst.Cols, divSizeR * r), new OpenCvSharp.CPlusPlus.Scalar(0, 0, 255), 1, OpenCvSharp.LineType.AntiAlias, 0);
                for (int c = 0; c < divNumC; c++)
                {
                    matDst.Line(new OpenCvSharp.CPlusPlus.Point(divSizeC * c, 0), new OpenCvSharp.CPlusPlus.Point(divSizeC * c, matDst.Rows), new OpenCvSharp.CPlusPlus.Scalar(0, 0, 255), 1, OpenCvSharp.LineType.AntiAlias, 0);

                    if (num[r, c] > 0)
                    {
                        OpenCvSharp.CPlusPlus.Cv2.PutText(matDst, num[r, c].ToString(), new OpenCvSharp.CPlusPlus.Point(10 + divSizeC * c, 20 + divSizeR * r), OpenCvSharp.FontFace.HersheySimplex, 0.5, new OpenCvSharp.CPlusPlus.Scalar(0, 0, 255), 2, OpenCvSharp.LineType.AntiAlias);
                        OpenCvSharp.CPlusPlus.Cv2.PutText(matDst, (sum[r, c] / num[r, c]).ToString("F2"), new OpenCvSharp.CPlusPlus.Point(10 + divSizeC * c, 40 + divSizeR * r), OpenCvSharp.FontFace.HersheySimplex, 0.5, new OpenCvSharp.CPlusPlus.Scalar(0, 0, 255), 2, OpenCvSharp.LineType.AntiAlias);
                        OpenCvSharp.CPlusPlus.Cv2.PutText(matDst, max[r, c].ToString("F2"), new OpenCvSharp.CPlusPlus.Point(10 + divSizeC * c, 60 + divSizeR * r), OpenCvSharp.FontFace.HersheySimplex, 0.5, new OpenCvSharp.CPlusPlus.Scalar(0, 0, 255), 2, OpenCvSharp.LineType.AntiAlias);
                    }
                }
            }

            return matDst;
        }
Example #9
        // Image segmentation using an image pyramid
        // Parameters: pyramid level, threshold for linking pixels, threshold for clustering
        private OpenCvSharp.CPlusPlus.Mat procPyrSegmentation(OpenCvSharp.CPlusPlus.Mat matSrc, int level, double threshold1, double threshold2)
        {
            // Prepare the Mat
            OpenCvSharp.CPlusPlus.Mat matDst = matSrc.Clone();

            // Prepare IplImages (for the C API)
            OpenCvSharp.IplImage iplSrc = matSrc.ToIplImage();
            OpenCvSharp.IplImage iplDst = iplSrc.Clone();

            // Set an ROI for building the pyramid (size must be divisible by 2^level)
            OpenCvSharp.CvRect roi;
            roi.X = 0;
            roi.Y = 0;
            roi.Width = iplSrc.Width & -(1 << level);
            roi.Height = iplSrc.Height & -(1 << level);
            iplSrc.SetROI(roi);
            iplDst.SetROI(roi);

            OpenCvSharp.Cv.PyrSegmentation(iplSrc, iplDst, level, threshold1, threshold2);

            // Convert the IplImage back to a Mat
            matDst = new OpenCvSharp.CPlusPlus.Mat(iplDst);

            return matDst;
        }
Example #10
        // Probabilistic Hough transform (Canny + Hough)
        private OpenCvSharp.CPlusPlus.Mat procHough(OpenCvSharp.CPlusPlus.Mat matSrc, int votes, double minLength, double maxGap)
        {
            OpenCvSharp.CPlusPlus.Mat matDst = matSrc.Clone();
            OpenCvSharp.CPlusPlus.Mat matCanny = new OpenCvSharp.CPlusPlus.Mat(matSrc.Rows, matSrc.Cols, OpenCvSharp.CPlusPlus.MatType.CV_8UC1);

            OpenCvSharp.CPlusPlus.Cv2.Canny(matSrc, matCanny, 100, 200, 3);

            // Hough transform
            double rho = 1.0;               // distance resolution
            double theta = Math.PI / 180.0; // angular resolution
            OpenCvSharp.CvLineSegmentPoint[] lines = OpenCvSharp.CPlusPlus.Cv2.HoughLinesP(matCanny, rho, theta, votes, minLength, maxGap);

            // Draw the detected line segments
            Random rnd = new Random();
            foreach (OpenCvSharp.CvLineSegmentPoint it in lines)
            {
                //matDst.Line(it.P1, it.P2, new OpenCvSharp.CPlusPlus.Scalar(0, 0, 255), 1, OpenCvSharp.LineType.AntiAlias, 0);
                matDst.Line(it.P1, it.P2, new OpenCvSharp.CPlusPlus.Scalar(rnd.Next(0, 255), rnd.Next(0, 255), rnd.Next(0, 255)), 1, OpenCvSharp.LineType.AntiAlias, 0);
            }

            return matDst;
        }
Example #11
        // Grayscale conversion
        private OpenCvSharp.CPlusPlus.Mat procGrayScale(OpenCvSharp.CPlusPlus.Mat matSrc)
        {
            OpenCvSharp.CPlusPlus.Mat matDst = matSrc.Clone();
            OpenCvSharp.CPlusPlus.Mat matGray = new OpenCvSharp.CPlusPlus.Mat(matSrc.Rows, matSrc.Cols, OpenCvSharp.CPlusPlus.MatType.CV_8UC1);

            OpenCvSharp.CPlusPlus.Cv2.CvtColor(matSrc, matGray, OpenCvSharp.ColorConversion.BgraToGray, 1);

            OpenCvSharp.CPlusPlus.Cv2.CvtColor(matGray, matDst, OpenCvSharp.ColorConversion.GrayToBgra, 3);

            return matDst;
        }
Example #12
        // Contour extraction (grayscale + binarization + contour extraction); returns an image containing only the contours
        private OpenCvSharp.CPlusPlus.Mat procContour(OpenCvSharp.CPlusPlus.Mat matSrc)
        {
            OpenCvSharp.CPlusPlus.Mat matDst = new OpenCvSharp.CPlusPlus.Mat(matSrc.Rows, matSrc.Cols, OpenCvSharp.CPlusPlus.MatType.CV_8UC3, new OpenCvSharp.CPlusPlus.Scalar(0, 0, 0));
            OpenCvSharp.CPlusPlus.Mat matGray = new OpenCvSharp.CPlusPlus.Mat(matSrc.Rows, matSrc.Cols, OpenCvSharp.CPlusPlus.MatType.CV_8UC1);
            OpenCvSharp.CPlusPlus.Mat matBinary = new OpenCvSharp.CPlusPlus.Mat(matSrc.Rows, matSrc.Cols, OpenCvSharp.CPlusPlus.MatType.CV_8UC1);

            OpenCvSharp.CPlusPlus.Cv2.CvtColor(matSrc, matGray, OpenCvSharp.ColorConversion.BgraToGray, 1);
            OpenCvSharp.CPlusPlus.Cv2.Threshold(matGray, matBinary, 0, 255, OpenCvSharp.ThresholdType.Binary | OpenCvSharp.ThresholdType.Otsu);

            // Extract the contours
            OpenCvSharp.CPlusPlus.Mat[] contours;
            OpenCvSharp.CPlusPlus.Mat hierarchy = new OpenCvSharp.CPlusPlus.Mat();
            OpenCvSharp.CPlusPlus.Cv2.FindContours(matBinary, out contours, hierarchy, OpenCvSharp.ContourRetrieval.Tree, OpenCvSharp.ContourChain.ApproxNone);
            //OpenCvSharp.CPlusPlus.Cv2.FindContours(matBinary, out contours, hierarchy, OpenCvSharp.ContourRetrieval.Tree, OpenCvSharp.ContourChain.ApproxSimple);
            //OpenCvSharp.CPlusPlus.Cv2.FindContours(matBinary, out contours, hierarchy, OpenCvSharp.ContourRetrieval.Tree, OpenCvSharp.ContourChain.ApproxTC89KCOS);
            //OpenCvSharp.CPlusPlus.Cv2.FindContours(matBinary, out contours, hierarchy, OpenCvSharp.ContourRetrieval.Tree, OpenCvSharp.ContourChain.ApproxTC89L1);

            // Draw the contours
            //OpenCvSharp.CPlusPlus.Cv2.DrawContours(matDst, contours, -1, new OpenCvSharp.CPlusPlus.Scalar(255, 255, 255), OpenCvSharp.Cv.FILLED, OpenCvSharp.LineType.AntiAlias, hierarchy);
            OpenCvSharp.CPlusPlus.Cv2.DrawContours(matDst, contours, -1, new OpenCvSharp.CPlusPlus.Scalar(255, 255, 255), 1, OpenCvSharp.LineType.AntiAlias, hierarchy);

            return matDst;
        }
Example #13
        // Edge detection (Canny)
        private OpenCvSharp.CPlusPlus.Mat procCanny(OpenCvSharp.CPlusPlus.Mat matSrc, double threshold1, double threshold2, int apertureSize)
        {
            OpenCvSharp.CPlusPlus.Mat matDst = matSrc.Clone();
            OpenCvSharp.CPlusPlus.Mat matCanny = new OpenCvSharp.CPlusPlus.Mat(matSrc.Rows, matSrc.Cols, OpenCvSharp.CPlusPlus.MatType.CV_8UC1);

            OpenCvSharp.CPlusPlus.Cv2.Canny(matSrc, matCanny, threshold1, threshold2, apertureSize);

            OpenCvSharp.CPlusPlus.Cv2.CvtColor(matCanny, matDst, OpenCvSharp.ColorConversion.GrayToBgra, 3);

            return matDst;
        }
Example #14
        // Binarization (grayscale conversion + binarization)
        private OpenCvSharp.CPlusPlus.Mat procBinary(OpenCvSharp.CPlusPlus.Mat matSrc)
        {
            OpenCvSharp.CPlusPlus.Mat matDst = matSrc.Clone();
            OpenCvSharp.CPlusPlus.Mat matGray = new OpenCvSharp.CPlusPlus.Mat(matSrc.Rows, matSrc.Cols, OpenCvSharp.CPlusPlus.MatType.CV_8UC1);
            OpenCvSharp.CPlusPlus.Mat matBinary = new OpenCvSharp.CPlusPlus.Mat(matSrc.Rows, matSrc.Cols, OpenCvSharp.CPlusPlus.MatType.CV_8UC1);

            OpenCvSharp.CPlusPlus.Cv2.CvtColor(matSrc, matGray, OpenCvSharp.ColorConversion.BgraToGray, 1);
            OpenCvSharp.CPlusPlus.Cv2.Threshold(matGray, matBinary, 0, 255, OpenCvSharp.ThresholdType.Binary | OpenCvSharp.ThresholdType.Otsu);

            OpenCvSharp.CPlusPlus.Cv2.CvtColor(matBinary, matDst, OpenCvSharp.ColorConversion.GrayToBgra, 3);

            return matDst;
        }