Example #1
        public VideoWriter()
        {
            // (1) Create a capture structure for the camera
            using (CvCapture capture = CvCapture.FromCamera(0))
            {
                // (2) Get the capture size (these values depend on the camera in use)
                int width = capture.FrameWidth;
                int height = capture.FrameHeight;
                double fps = 15;//capture.Fps;
                // (3) Create a video writer structure
                using (CvVideoWriter writer = new CvVideoWriter("cap.avi", FourCC.Prompt, fps, new CvSize(width, height)))
                using (CvFont font = new CvFont(FontFace.HersheyComplex, 0.7, 0.7))
                using (CvWindow window = new CvWindow("Capture", WindowMode.AutoSize))
                {
                    // (4) Capture frames from the camera and write them to the file
                    for (int frames = 0; ; frames++)
                    {
                        IplImage frame = capture.QueryFrame();
                        string str = string.Format("{0}[frame]", frames);
                        frame.PutText(str, new CvPoint(10, 20), font, new CvColor(0, 255, 100));
                        writer.WriteFrame(frame);
                        window.ShowImage(frame);

                        int key = CvWindow.WaitKey((int)(1000 / fps));
                        if (key == '\x1b')
                        {
                            break;
                        }
                    }
                }
            }

        }
Example #2
        static void showrite(String s, CvMat image)
        {
            CvWindow window = new CvWindow(showiteCounter.ToString() + s);
            window.ShowImage(image);
            image.SaveImage(showiteCounter.ToString() + s + ".png");

            showiteCounter++;
        }
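The showrite helper above displays a matrix in its own window and saves it to a PNG file named after a running counter. The counter field is not part of the snippet; a minimal sketch of what it assumes (the name is taken from the code, the type and initial value are my assumption):

        // Hypothetical backing field for showrite: a running index used in window titles and file names
        static int showiteCounter = 0;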
Example #3
        public Histogram()
        {
            // cvCalcHist
            // Sample that lets you vary contrast and brightness

            const int histSize = 64;
            float[] range0 = { 0, 256 };
            float[][] ranges = { range0 };

            // Load the image
            using (IplImage srcImg = new IplImage(Const.ImageLenna, LoadMode.GrayScale))
            using (IplImage dstImg = srcImg.Clone())
            using (IplImage histImg = new IplImage(new CvSize(400, 400), BitDepth.U8, 1))
            using (CvHistogram hist = new CvHistogram(new int[] { histSize }, HistogramFormat.Array, ranges, true))
            {
                using (CvWindow windowImage = new CvWindow("image", WindowMode.AutoSize))
                using (CvWindow windowHist = new CvWindow("histogram", WindowMode.AutoSize))
                {
                    // Processing when a trackbar is moved
                    CvTrackbar ctBrightness = null;
                    CvTrackbar ctContrast = null;
                    CvTrackbarCallback callback = delegate(int pos)
                    {
                        int brightness = ctBrightness.Pos - 100;
                        int contrast = ctContrast.Pos - 100;
                        // Apply the LUT
                        byte[] lut = CalcLut(contrast, brightness);
                        srcImg.LUT(dstImg, lut);
                        // Draw the histogram
                        CalcHist(dstImg, hist);
                        DrawHist(histImg, hist, histSize);
                        // Show in the windows
                        windowImage.ShowImage(dstImg);
                        windowHist.ShowImage(histImg);
                        dstImg.Zero();
                        histImg.Zero();
                    };

                    // Create the trackbars
                    // (In OpenCV you can track trackbar position changes by passing a pointer to the current position,
                    // but in .NET the GC can move that pointer, so this was dropped. Retrieve the position some other way.)
                    ctBrightness = windowImage.CreateTrackbar("brightness", 100, 200, callback);
                    ctContrast = windowImage.CreateTrackbar("contrast", 100, 200, callback);
                    // Initial drawing
                    callback(0);

                    // Wait for a key press
                    Cv.WaitKey(0);
                }
            }
        }
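This example calls CalcLut, CalcHist and DrawHist, which are not included in the snippet. As a rough sketch, CalcLut could build the brightness/contrast lookup table the way OpenCV's demhist sample does (the actual helper in the original project may differ):

        // Sketch of a possible CalcLut: builds a 256-entry brightness/contrast LUT
        // in the style of OpenCV's demhist sample (assumed, not taken from this project)
        static byte[] CalcLut(int contrast, int brightness)
        {
            byte[] lut = new byte[256];
            double a, b;
            if (contrast > 0)
            {
                double delta = 127.0 * contrast / 100.0;
                a = 255.0 / (255.0 - delta * 2);
                b = a * (brightness - delta);
            }
            else
            {
                double delta = -128.0 * contrast / 100.0;
                a = (256.0 - delta * 2) / 255.0;
                b = a * brightness + delta;
            }
            for (int i = 0; i < 256; i++)
            {
                int v = Cv.Round(a * i + b);
                lut[i] = (byte)Math.Max(0, Math.Min(255, v));
            }
            return lut;
        }

CalcHist and DrawHist are sketched after Example #7, which uses the same helpers.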
Example #4
        public ConvexHull()
        {
            using (IplImage img = Cv.CreateImage(new CvSize(500, 500), BitDepth.U8, 3))
            using (CvWindow window = new CvWindow("hull"))
            {
                Random rand = new Random();

                for (; ; )
                {
                    int count = rand.Next() % 100 + 1;

                    // create sequence of random points
                    CvPoint[] ptseq = new CvPoint[count];
                    for (int i = 0; i < ptseq.Length; i++)
                    {
                        ptseq[i] = new CvPoint
                        {
                            X = rand.Next() % (img.Width / 2) + img.Width / 4,
                            Y = rand.Next() % (img.Height / 2) + img.Height / 4
                        };
                    }

                    // draw points
                    Cv.Zero(img);
                    foreach(CvPoint pt in ptseq)
                    {
                        Cv.Circle(img, pt, 2, new CvColor(255, 0, 0), -1);
                    }

                    // find hull
                    CvPoint[] hull;
                    Cv.ConvexHull2(ptseq, out hull, ConvexHullOrientation.Clockwise);

                    // draw hull
                    CvPoint pt0 = hull.Last();
                    foreach(CvPoint pt in hull)
                    {
                        Cv.Line(img, pt0, pt, CvColor.Green);
                        pt0 = pt;
                    }


                    window.ShowImage(img);

                    if (Cv.WaitKey(0) == 27) // 'ESC'
                        break;
                }

            }
        }
Example #5
        public Moments()
        {
            // (1) Load the image. For a 3-channel image the COI must be set
            using (IplImage srcImg = new IplImage(Const.ImageLenna, LoadMode.AnyColor | LoadMode.AnyDepth))
            {
                if (srcImg.NChannels == 3 && srcImg.COI == 0)
                {
                    srcImg.COI = 1;
                }
                // (2) Compute the image moments of the input image up to third order
                CvMoments moments = new CvMoments(srcImg, false);
                srcImg.COI = 0;

                // (3) Compute moments and Hu moment invariants from the values in the resulting CvMoments structure
                double spatialMoment = moments.GetSpatialMoment(0, 0);
                double centralMoment = moments.GetCentralMoment(0, 0);
                double normCMoment = moments.GetNormalizedCentralMoment(0, 0);
                CvHuMoments huMoments = new CvHuMoments(moments);

                // (4) Draw the computed moments and Hu moment invariants on the image as text
                using (CvFont font = new CvFont(FontFace.HersheySimplex, 1.0, 1.0, 0, 2, LineType.Link8))
                {
                    string[] text = new string[10];
                    text[0] = string.Format("spatial={0:F3}", spatialMoment);
                    text[1] = string.Format("central={0:F3}", centralMoment);
                    text[2] = string.Format("norm={0:F3}", normCMoment);
                    text[3] = string.Format("hu1={0:F10}", huMoments.Hu1);
                    text[4] = string.Format("hu2={0:F10}", huMoments.Hu2);
                    text[5] = string.Format("hu3={0:F10}", huMoments.Hu3);
                    text[6] = string.Format("hu4={0:F10}", huMoments.Hu4);
                    text[7] = string.Format("hu5={0:F10}", huMoments.Hu5);
                    text[8] = string.Format("hu6={0:F10}", huMoments.Hu6);
                    text[9] = string.Format("hu7={0:F10}", huMoments.Hu7);

                    CvSize textSize = font.GetTextSize(text[0]);
                    for (int i = 0; i < 10; i++)
                    {
                        srcImg.PutText(text[i], new CvPoint(10, (textSize.Height + 3) * (i + 1)), font, CvColor.Black);
                    }
                }

                // (5) Show the input image with the moment results; exit when a key is pressed
                using (CvWindow window = new CvWindow("Image", WindowMode.AutoSize))
                {
                    window.ShowImage(srcImg);
                    Cv.WaitKey(0);
                }
            }

        }
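The snippet only prints the zero-order moments. A short follow-up that is often useful is the image centroid, computed from the first-order spatial moments; these few lines (not part of the original sample) could be added right after step (3) inside the method above:

                // Centroid from spatial moments: xc = M10 / M00, yc = M01 / M00
                double m00 = moments.GetSpatialMoment(0, 0);
                double xc = moments.GetSpatialMoment(1, 0) / m00;
                double yc = moments.GetSpatialMoment(0, 1) / m00;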
Example #6
        public Moments()
        {
            using (IplImage srcImg = new IplImage(FilePath.Image.Lenna, LoadMode.AnyColor | LoadMode.AnyDepth))
            {
                if (srcImg.NChannels == 3 && srcImg.COI == 0)
                {
                    srcImg.COI = 1;
                }

                CvMoments moments = new CvMoments(srcImg, false);
                srcImg.COI = 0;

                double spatialMoment = moments.GetSpatialMoment(0, 0);
                double centralMoment = moments.GetCentralMoment(0, 0);
                double normCMoment = moments.GetNormalizedCentralMoment(0, 0);
                CvHuMoments huMoments = new CvHuMoments(moments);

                // drawing
                using (CvFont font = new CvFont(FontFace.HersheySimplex, 1.0, 1.0, 0, 2, LineType.Link8))
                {
                    string[] text = new string[10];
                    text[0] = string.Format("spatial={0:F3}", spatialMoment);
                    text[1] = string.Format("central={0:F3}", centralMoment);
                    text[2] = string.Format("norm={0:F3}", normCMoment);
                    text[3] = string.Format("hu1={0:F10}", huMoments.Hu1);
                    text[4] = string.Format("hu2={0:F10}", huMoments.Hu2);
                    text[5] = string.Format("hu3={0:F10}", huMoments.Hu3);
                    text[6] = string.Format("hu4={0:F10}", huMoments.Hu4);
                    text[7] = string.Format("hu5={0:F10}", huMoments.Hu5);
                    text[8] = string.Format("hu6={0:F10}", huMoments.Hu6);
                    text[9] = string.Format("hu7={0:F10}", huMoments.Hu7);

                    CvSize textSize = font.GetTextSize(text[0]);
                    for (int i = 0; i < 10; i++)
                    {
                        srcImg.PutText(text[i], new CvPoint(10, (textSize.Height + 3) * (i + 1)), font, CvColor.Black);
                    }
                }

                using (var window = new CvWindow("Image", WindowMode.AutoSize))
                {
                    window.ShowImage(srcImg);
                    Cv.WaitKey(0);
                }
            }

        }
Example #7
        public Histogram()
        {
            // cvCalcHist

            const int histSize = 64;
            float[] range0 = { 0, 256 };
            float[][] ranges = { range0 };

            using (IplImage srcImg = new IplImage(FilePath.Image.Lenna, LoadMode.GrayScale))
            using (IplImage dstImg = srcImg.Clone())
            using (IplImage histImg = new IplImage(new CvSize(400, 400), BitDepth.U8, 1))
            using (CvHistogram hist = new CvHistogram(new int[] { histSize }, HistogramFormat.Array, ranges, true))
            {
                using (CvWindow windowImage = new CvWindow("image", WindowMode.AutoSize))
                using (CvWindow windowHist = new CvWindow("histogram", WindowMode.AutoSize))
                {
                    CvTrackbar ctBrightness = null;
                    CvTrackbar ctContrast = null;
                    CvTrackbarCallback callback = delegate(int pos)
                    {
                        int brightness = ctBrightness.Pos - 100;
                        int contrast = ctContrast.Pos - 100;
                        // perform LUT
                        byte[] lut = CalcLut(contrast, brightness);
                        srcImg.LUT(dstImg, lut);
                        // draws histogram
                        CalcHist(dstImg, hist);
                        DrawHist(histImg, hist, histSize);

                        windowImage.ShowImage(dstImg);
                        windowHist.ShowImage(histImg);
                        dstImg.Zero();
                        histImg.Zero();
                    };

                    ctBrightness = windowImage.CreateTrackbar("brightness", 100, 200, callback);
                    ctContrast = windowImage.CreateTrackbar("contrast", 100, 200, callback);
                    // initial action
                    callback(0);

                    Cv.WaitKey(0);
                }
            }
        }
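Like Example #3, this snippet relies on CalcLut (sketched after Example #3) as well as CalcHist and DrawHist. A rough sketch of the latter two, assuming the bin-bar drawing of OpenCV's demhist sample and the 400x400 histogram image used above; the OpenCvSharp calls (Cv.CalcHist, Cv.GetMinMaxHistValue, Cv.Scale, Cv.GetReal1D, Cv.Rectangle) mirror the corresponding C API, but the original project's helpers may differ:

        // Sketch of possible CalcHist/DrawHist helpers (assumed, not from the original project)
        static void CalcHist(IplImage img, CvHistogram hist)
        {
            Cv.CalcHist(new IplImage[] { img }, hist);
            float minValue, maxValue;
            Cv.GetMinMaxHistValue(hist, out minValue, out maxValue);
            // Scale the bins so the tallest bar fits the histogram image height (400 px here)
            Cv.Scale(hist.Bins, hist.Bins, maxValue > 0 ? 400.0 / maxValue : 0.0, 0);
        }

        static void DrawHist(IplImage img, CvHistogram hist, int histSize)
        {
            // Draw each bin as a filled white bar on the (zeroed) histogram image
            int binW = Cv.Round((double)img.Width / histSize);
            for (int i = 0; i < histSize; i++)
            {
                Cv.Rectangle(img,
                    new CvPoint(i * binW, img.Height),
                    new CvPoint((i + 1) * binW, img.Height - Cv.Round(Cv.GetReal1D(hist.Bins, i))),
                    CvColor.White, -1, LineType.Link8, 0);
            }
        }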
Example #8
        public QtTest()
        {
            using (CvWindow window = new CvWindow("window", WindowMode.ExpandedGui))
            using (IplImage img = new IplImage(FilePath.Image.Lenna, LoadMode.Color))
            {
                if (CvWindow.HasQt)
                {
                    // cvAddText
                    CvFont font = new CvFontQt("MS UI Gothic", 48, CvColor.Red, FontWeight.Bold, FontStyle.Italic);
                    img.AddText("Hello Qt!!", new CvPoint(50, img.Height - 50), font);                    

                    // cvDisplayOverlay, cvDisplayStatusBar
                    window.DisplayOverlay("overlay text", 2000);
                    window.DisplayStatusBar("statusbar text", 3000);

                    // cvCreateButton
                    CvButtonCallback buttonCallback = delegate(int state, object userdata)
                    {
                        Console.WriteLine("Button state:{0} userdata:{1} ({2})", state, userdata, userdata.GetType());
                    };
                    Cv.CreateButton("button1", buttonCallback, "my userstate", ButtonType.Checkbox, 0);
                    Cv.CreateButton("button2", buttonCallback, 12345.6789, ButtonType.Checkbox, 0);

                    // cvSaveWindowParameters
                    //window.SaveWindowParameters();
                }

                window.ShowImage(img);

                // cvCreateTrackbar2
                CvTrackbarCallback2 trackbarCallback = delegate(int pos, object userdata)
                {
                    Console.WriteLine("Trackbar pos:{0} userdata:{1} ({2})", pos, userdata, userdata.GetType());
                };
                window.CreateTrackbar2("trackbar1", 128, 256, trackbarCallback, "foobar");

                Cv.WaitKey();
            }
        }
Example #9
        public Inpaint()
        {
            // cvInpaint
            // Remove unwanted text from an image by specifying a mask image over the text regions

            Console.WriteLine(
                "Hot keys: \n" +
                "\tESC - quit the program\n" +
                "\tr - restore the original image\n" +
                "\ti or ENTER - run inpainting algorithm\n" +
                "\t\t(before running it, paint something on the image)\n" +
                "\ts - save the original image, mask image, original+mask image and inpainted image to desktop."
            );

            // Load the original image
            using (IplImage img0 = new IplImage(Const.ImageFruits, LoadMode.AnyDepth | LoadMode.AnyColor))
            {
                // Allocate an image to paint on, plus the mask
                using (IplImage img = img0.Clone())
                using (IplImage inpaintMask = new IplImage(img0.Size, BitDepth.U8, 1))
                // Allocate the destination image for Inpaint
                using (IplImage inpainted = img0.Clone())
                {
                    inpainted.Zero();
                    inpaintMask.Zero();

                    using (CvWindow wImage = new CvWindow("image", WindowMode.AutoSize, img))
                    {

                        // Handle mouse events
                        CvPoint prevPt = new CvPoint(-1, -1);
                        wImage.OnMouseCallback += delegate(MouseEvent ev, int x, int y, MouseEvent flags)
                        {
                            if (ev == MouseEvent.LButtonUp || (flags & MouseEvent.FlagLButton) == 0)
                            {
                                prevPt = new CvPoint(-1, -1);
                            }
                            else if (ev == MouseEvent.LButtonDown)
                            {
                                prevPt = new CvPoint(x, y);
                            }
                            else if (ev == MouseEvent.MouseMove && (flags & MouseEvent.FlagLButton) != 0)
                            {
                                CvPoint pt = new CvPoint(x, y);
                                if (prevPt.X < 0)
                                {
                                    prevPt = pt;
                                }
                                inpaintMask.Line(prevPt, pt, CvColor.White, 5, LineType.AntiAlias, 0);
                                img.Line(prevPt, pt, CvColor.White, 5, LineType.AntiAlias, 0);
                                prevPt = pt;
                                wImage.ShowImage(img);
                            }
                        };

                        for (; ; )
                        {
                            switch ((char)CvWindow.WaitKey(0))
                            {
                                case (char)27: // Exit on ESC
                                    CvWindow.DestroyAllWindows();
                                    return;
                                case 'r': // Restore the original image
                                    inpaintMask.Zero();
                                    img0.Copy(img);
                                    wImage.ShowImage(img);
                                    break;
                                case 'i': // Run Inpaint
                                case '\r':
                                    CvWindow wInpaint = new CvWindow("inpainted image", WindowMode.AutoSize);
                                    img.Inpaint(inpaintMask, inpainted, 3, InpaintMethod.Telea);
                                    wInpaint.ShowImage(inpainted);
                                    break;
                                case 's': // Save the images
                                    string desktop = Environment.GetFolderPath(Environment.SpecialFolder.Desktop);
                                    img0.SaveImage(Path.Combine(desktop, "original.png"));
                                    inpaintMask.SaveImage(Path.Combine(desktop, "mask.png"));
                                    img.SaveImage(Path.Combine(desktop, "original+mask.png"));
                                    inpainted.SaveImage(Path.Combine(desktop, "inpainted.png"));
                                    break;
                            }
                        }

                    }

                }
            }

        }
Example #10
        public FindContours()
        {
            // cvFindContours, cvDrawContours
            // Detect contours in the image and draw those lying at levels from -1 to +1

            const int SIZE = 500;

            using (IplImage img = new IplImage(SIZE, SIZE, BitDepth.U8, 1))
            {
                // Initialize the image
                img.Zero();
                for (int i = 0; i < 6; i++)
                {
                    int dx = (i % 2) * 250 - 30;
                    int dy = (i / 2) * 150;
                    if (i == 0)
                    {
                        for (int j = 0; j <= 10; j++)
                        {
                            double angle = (j + 5) * Cv.PI / 21;
                            CvPoint p1 = new CvPoint(Cv.Round(dx + 100 + j * 10 - 80 * Math.Cos(angle)), Cv.Round(dy + 100 - 90 * Math.Sin(angle)));
                            CvPoint p2 = new CvPoint(Cv.Round(dx + 100 + j * 10 - 30 * Math.Cos(angle)), Cv.Round(dy + 100 - 30 * Math.Sin(angle)));
                            Cv.Line(img, p1, p2, CvColor.White, 1, LineType.AntiAlias, 0);
                        }
                    }
                    Cv.Ellipse(img, new CvPoint(dx + 150, dy + 100), new CvSize(100, 70), 0, 0, 360, CvColor.White, -1, LineType.AntiAlias, 0);
                    Cv.Ellipse(img, new CvPoint(dx + 115, dy + 70), new CvSize(30, 20), 0, 0, 360, CvColor.Black, -1, LineType.AntiAlias, 0);
                    Cv.Ellipse(img, new CvPoint(dx + 185, dy + 70), new CvSize(30, 20), 0, 0, 360, CvColor.Black, -1, LineType.AntiAlias, 0);
                    Cv.Ellipse(img, new CvPoint(dx + 115, dy + 70), new CvSize(15, 15), 0, 0, 360, CvColor.White, -1, LineType.AntiAlias, 0);
                    Cv.Ellipse(img, new CvPoint(dx + 185, dy + 70), new CvSize(15, 15), 0, 0, 360, CvColor.White, -1, LineType.AntiAlias, 0);
                    Cv.Ellipse(img, new CvPoint(dx + 115, dy + 70), new CvSize(5, 5), 0, 0, 360, CvColor.Black, -1, LineType.AntiAlias, 0);
                    Cv.Ellipse(img, new CvPoint(dx + 185, dy + 70), new CvSize(5, 5), 0, 0, 360, CvColor.Black, -1, LineType.AntiAlias, 0);
                    Cv.Ellipse(img, new CvPoint(dx + 150, dy + 100), new CvSize(10, 5), 0, 0, 360, CvColor.Black, -1, LineType.AntiAlias, 0);
                    Cv.Ellipse(img, new CvPoint(dx + 150, dy + 150), new CvSize(40, 10), 0, 0, 360, CvColor.Black, -1, LineType.AntiAlias, 0);
                    Cv.Ellipse(img, new CvPoint(dx + 27, dy + 100), new CvSize(20, 35), 0, 0, 360, CvColor.White, -1, LineType.AntiAlias, 0);
                    Cv.Ellipse(img, new CvPoint(dx + 273, dy + 100), new CvSize(20, 35), 0, 0, 360, CvColor.White, -1, LineType.AntiAlias, 0);
                }

                // Detect contours
                CvSeq<CvPoint> contours;
                CvMemStorage storage = new CvMemStorage();
                // native style
                Cv.FindContours(img, storage, out contours, CvContour.SizeOf, ContourRetrieval.Tree, ContourChain.ApproxSimple);
                contours = Cv.ApproxPoly(contours, CvContour.SizeOf, storage, ApproxPolyMethod.DP, 3, true);
                
                // wrapper style
                //img.FindContours(storage, out contours, ContourRetrieval.Tree, ContourChain.ApproxSimple);
                //contours = contours.ApproxPoly(storage, ApproxPolyMethod.DP, 3, true);

                // Show in windows
                using (CvWindow window_image = new CvWindow("image", img))
                using (CvWindow window_contours = new CvWindow("contours"))
                {
                    CvTrackbarCallback onTrackbar = delegate(int pos)
                    {
                        IplImage cnt_img = new IplImage(SIZE, SIZE, BitDepth.U8, 3);
                        CvSeq<CvPoint> _contours = contours;
                        int levels = pos - 3;
                        if (levels <= 0) // get to the nearest face to make it look more funny
                        {
                            //_contours = _contours.HNext.HNext.HNext;
                        }
                        cnt_img.Zero();
                        Cv.DrawContours(cnt_img, _contours, CvColor.Red, CvColor.Green, levels, 3, LineType.AntiAlias);
                        window_contours.ShowImage(cnt_img);
                        cnt_img.Dispose();
                    };
                    window_contours.CreateTrackbar("levels+3", 3, 7, onTrackbar);
                    onTrackbar(3);

                    Cv.WaitKey();
                }
            }

        }
Example #11
        public unsafe Kalman()
        {
            // cvKalmanPredict, cvKalmanCorrect
            // Track a rotating point using a Kalman filter

            // A matrix data
            float[] A = new float[] { 1, 1, 0, 1 };

            using (IplImage img = new IplImage(500, 500, BitDepth.U8, 3))
            using (CvKalman kalman = new CvKalman(2, 1, 0))
            using (CvWindow window = new CvWindow("Kalman", WindowMode.AutoSize))
            {
                // state is (phi, delta_phi) - angle and angle increment
                CvMat state = new CvMat(2, 1, MatrixType.F32C1);
                CvMat process_noise = new CvMat(2, 1, MatrixType.F32C1);
                // only phi (angle) is measured
                CvMat measurement = new CvMat(1, 1, MatrixType.F32C1);

                measurement.SetZero();
                CvRandState rng = new CvRandState(0, 1, -1, DistributionType.Uniform);
                int code = -1;

                for (; ; )
                {
                    Cv.RandSetRange(rng, 0, 0.1, 0);
                    rng.DistType = DistributionType.Normal;

                    Marshal.Copy(A, 0, kalman.TransitionMatrix.Data, A.Length);
                    kalman.MeasurementMatrix.SetIdentity(1);
                    kalman.ProcessNoiseCov.SetIdentity(1e-5);
                    kalman.MeasurementNoiseCov.SetIdentity(1e-1);
                    kalman.ErrorCovPost.SetIdentity(1);
                    // choose random initial state
                    Cv.Rand(rng, kalman.StatePost);
                    rng.DistType = DistributionType.Normal;

                    for (; ; )
                    {
                        float state_angle = state.DataSingle[0];
                        CvPoint state_pt = CalcPoint(img, state_angle);

                        // predict point position
                        CvMat prediction = kalman.Predict(null);
                        float predict_angle = prediction.DataSingle[0];
                        CvPoint predict_pt = CalcPoint(img, predict_angle);

                        Cv.RandSetRange(rng, 0, Math.Sqrt(kalman.MeasurementNoiseCov.DataSingle[0]), 0);
                        Cv.Rand(rng, measurement);

                        // generate measurement
                        Cv.MatMulAdd(kalman.MeasurementMatrix, state, measurement, measurement);

                        float measurement_angle = measurement.DataArraySingle[0];
                        CvPoint measurement_pt = CalcPoint(img, measurement_angle);

                        img.SetZero();
                        DrawCross(img, state_pt, CvColor.White, 3);
                        DrawCross(img, measurement_pt, CvColor.Red, 3);
                        DrawCross(img, predict_pt, CvColor.Green, 3);
                        img.Line(state_pt, measurement_pt, new CvColor(255, 0, 0), 3, LineType.AntiAlias, 0);
                        img.Line(state_pt, predict_pt, new CvColor(255, 255, 0), 3, LineType.AntiAlias, 0);

                        // adjust Kalman filter state
                        kalman.Correct(measurement);

                        Cv.RandSetRange(rng, 0, Math.Sqrt(kalman.ProcessNoiseCov.DataSingle[0]), 0);
                        Cv.Rand(rng, process_noise);
                        Cv.MatMulAdd(kalman.TransitionMatrix, state, process_noise, state);

                        window.ShowImage(img);
                        // break current simulation by pressing a key
                        code = CvWindow.WaitKey(100);
                        if (code > 0)
                        {
                            break;
                        }
                    }
                    // exit by ESCAPE
                    if (code == 27)
                    {
                        break;
                    }
                }
            }
        }
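CalcPoint and DrawCross are helpers not shown in this snippet. Following OpenCV's kalman sample, they might look like the sketch below (an assumption, not the project's actual code):

        // One possible CalcPoint: map an angle to a point on a circle centered in the image
        static CvPoint CalcPoint(IplImage img, double angle)
        {
            return new CvPoint
            {
                X = Cv.Round(img.Width / 2.0 + img.Width / 3.0 * Math.Cos(angle)),
                Y = Cv.Round(img.Height / 2.0 - img.Width / 3.0 * Math.Sin(angle))
            };
        }

        // One possible DrawCross: two diagonal lines crossing at the given center
        static void DrawCross(IplImage img, CvPoint center, CvColor color, int d)
        {
            img.Line(new CvPoint(center.X - d, center.Y - d), new CvPoint(center.X + d, center.Y + d), color, 1, LineType.AntiAlias, 0);
            img.Line(new CvPoint(center.X + d, center.Y - d), new CvPoint(center.X - d, center.Y + d), color, 1, LineType.AntiAlias, 0);
        }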
Example #12
        public SURFSample()
        {
            // cvExtractSURF
            // Find corresponding points with SURF


            // call cv::initModule_nonfree() before using SURF/SIFT.
            CvCpp.InitModule_NonFree();


            using (IplImage obj = Cv.LoadImage(Const.ImageSurfBox, LoadMode.GrayScale))
            using (IplImage image = Cv.LoadImage(Const.ImageSurfBoxinscene, LoadMode.GrayScale))
            using (IplImage objColor = Cv.CreateImage(obj.Size, BitDepth.U8, 3))
            using (IplImage correspond = Cv.CreateImage(new CvSize(image.Width, obj.Height + image.Height), BitDepth.U8, 1))
            {
                Cv.CvtColor(obj, objColor, ColorConversion.GrayToBgr);

                Cv.SetImageROI(correspond, new CvRect(0, 0, obj.Width, obj.Height));
                Cv.Copy(obj, correspond);
                Cv.SetImageROI(correspond, new CvRect(0, obj.Height, correspond.Width, correspond.Height));
                Cv.Copy(image, correspond);
                Cv.ResetImageROI(correspond);

                // Run SURF
                CvSURFPoint[] objectKeypoints, imageKeypoints;
                float[][] objectDescriptors, imageDescriptors;
                Stopwatch watch = Stopwatch.StartNew();
                {
                    CvSURFParams param = new CvSURFParams(500, true);
                    Cv.ExtractSURF(obj, null, out objectKeypoints, out objectDescriptors, param);
                    Console.WriteLine("Object Descriptors: {0}", objectDescriptors.Length);
                    Cv.ExtractSURF(image, null, out imageKeypoints, out imageDescriptors, param);
                    Console.WriteLine("Image Descriptors: {0}", imageDescriptors.Length);
                }
                watch.Stop();
                Console.WriteLine("Extraction time = {0}ms", watch.ElapsedMilliseconds);
                watch.Reset();
                watch.Start();

                // Outline the region of the object image found in the scene image
                CvPoint[] srcCorners = new CvPoint[4]
                    {
                        new CvPoint(0, 0), new CvPoint(obj.Width, 0), new CvPoint(obj.Width, obj.Height), new CvPoint(0, obj.Height)
                    };
                CvPoint[] dstCorners = LocatePlanarObject(objectKeypoints, objectDescriptors, imageKeypoints, imageDescriptors, srcCorners);
                if (dstCorners != null)
                {
                    for (int i = 0; i < 4; i++)
                    {
                        CvPoint r1 = dstCorners[i%4];
                        CvPoint r2 = dstCorners[(i + 1)%4];
                        Cv.Line(correspond, new CvPoint(r1.X, r1.Y + obj.Height), new CvPoint(r2.X, r2.Y + obj.Height), CvColor.White);
                    }
                }

                // Draw lines between corresponding points
                int[] ptPairs = FindPairs(objectKeypoints, objectDescriptors, imageKeypoints, imageDescriptors);
                for (int i = 0; i < ptPairs.Length; i += 2)
                {
                    CvSURFPoint r1 = objectKeypoints[ptPairs[i]];
                    CvSURFPoint r2 = imageKeypoints[ptPairs[i + 1]];
                    Cv.Line(correspond, r1.Pt, new CvPoint(Cv.Round(r2.Pt.X), Cv.Round(r2.Pt.Y + obj.Height)), CvColor.White);
                }

                // Draw circles at the keypoint locations
                for (int i = 0; i < objectKeypoints.Length; i++)
                {
                    CvSURFPoint r = objectKeypoints[i];
                    CvPoint center = new CvPoint(Cv.Round(r.Pt.X), Cv.Round(r.Pt.Y));
                    int radius = Cv.Round(r.Size*(1.2/9.0)*2);
                    Cv.Circle(objColor, center, radius, CvColor.Red, 1, LineType.AntiAlias, 0);
                }
                watch.Stop();
                Console.WriteLine("Drawing time = {0}ms", watch.ElapsedMilliseconds);

                // Show in windows
                using (CvWindow windowObject = new CvWindow("Object", WindowMode.AutoSize))
                using (CvWindow windowCorrespond = new CvWindow("Object Correspond", WindowMode.AutoSize))
                {
                    windowObject.ShowImage(objColor);
                    windowCorrespond.ShowImage(correspond);
                    Cv.WaitKey(0);
                }
            }
        }
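LocatePlanarObject and FindPairs come from OpenCV's find_obj sample and are not included here. A sketch of what FindPairs might look like, using brute-force nearest-neighbor matching with a Laplacian-sign check and a ratio test (assumes System.Collections.Generic is imported; the original helper may differ):

        // Sketch of a possible FindPairs: for each object descriptor, find the closest scene
        // descriptor and accept it only if it is clearly better than the second best
        static int[] FindPairs(CvSURFPoint[] objectKeypoints, float[][] objectDescriptors,
                               CvSURFPoint[] imageKeypoints, float[][] imageDescriptors)
        {
            List<int> pairs = new List<int>();
            for (int i = 0; i < objectDescriptors.Length; i++)
            {
                int neighbor = -1;
                double dist1 = double.MaxValue, dist2 = double.MaxValue;
                for (int j = 0; j < imageDescriptors.Length; j++)
                {
                    // Keypoints with different Laplacian signs cannot match
                    if (objectKeypoints[i].Laplacian != imageKeypoints[j].Laplacian)
                        continue;
                    // Squared Euclidean distance between descriptors
                    double d = 0;
                    for (int k = 0; k < objectDescriptors[i].Length; k++)
                    {
                        double t = objectDescriptors[i][k] - imageDescriptors[j][k];
                        d += t * t;
                    }
                    if (d < dist1) { dist2 = dist1; dist1 = d; neighbor = j; }
                    else if (d < dist2) { dist2 = d; }
                }
                // Ratio test: keep the match only if it is significantly better than the runner-up
                if (neighbor >= 0 && dist1 < 0.6 * dist2)
                {
                    pairs.Add(i);
                    pairs.Add(neighbor);
                }
            }
            return pairs.ToArray();
        }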
Example #13
        public static CameraCalibrationData CalibrateLens(List<Bitmap> bitmaps, int calibrationTakes)
        {
            int ImageNum = calibrationTakes;
            const int PatRow = 7;
            const int PatCol = 10;
            const int PatSize = PatRow * PatCol;
            int AllPoints = ImageNum * PatSize;
            const float ChessSize = 24.0f;

            // Convert bitmaps into Ipl Images
            IplImage[] srcImg = new IplImage[bitmaps.Count];
            for (int i = 0; i < bitmaps.Count; i++)
                srcImg[i] = OpenCVUtil.IplImageFromBitmap(bitmaps[i]);

            CvPoint3D32f[, ,] objects = new CvPoint3D32f[ImageNum, PatRow, PatCol];
            for (int i = 0; i < ImageNum; i++)
            {
                for (int j = 0; j < PatRow; j++)
                {
                    for (int k = 0; k < PatCol; k++)
                    {
                        objects[i, j, k] = new CvPoint3D32f
                        {
                            X = j * ChessSize,
                            Y = k * ChessSize,
                            Z = 0.0f
                        };
                    }
                }
            }
            CvMat objectPoints = new CvMat(AllPoints, 3, MatrixType.F32C1, objects);

            CvSize patternSize = new CvSize(PatCol, PatRow);

            int foundNum = 0;
            List<CvPoint2D32f> allCorners = new List<CvPoint2D32f>(AllPoints);
            int[] pointCountsValue = new int[ImageNum];
            using (CvWindow window = new CvWindow("Calibration", WindowMode.AutoSize))
            {
                for (int i = 0; i < ImageNum; i++)
                {
                    CvPoint2D32f[] corners;
                    bool found = Cv.FindChessboardCorners(srcImg[i], patternSize, out corners);
                    Debug.Print("{0:D2}...", i);
                    if (found)
                    {
                        Debug.Print("ok");
                        foundNum++;
                    }
                    else
                    {
                        Debug.Print("fail");
                    }

                    using (IplImage srcGray = new IplImage(srcImg[i].Size, BitDepth.U8, 1))
                    {
                        Cv.CvtColor(srcImg[i], srcGray, ColorConversion.BgrToGray);
                        Cv.FindCornerSubPix(srcGray, corners, corners.Length, new CvSize(3, 3), new CvSize(-1, -1), new CvTermCriteria(20, 0.03));
                        Cv.DrawChessboardCorners(srcImg[i], patternSize, corners, found);
                        pointCountsValue[i] = corners.Length;

                        window.ShowImage(srcImg[i]);
                        //Cv.WaitKey(0);
                    }
                    allCorners.AddRange(corners);
                }
                if (foundNum != ImageNum)
                {
                    Debug.Assert(false);
                }
            }

            CvMat imagePoints = new CvMat(AllPoints, 1, MatrixType.F32C2, allCorners.ToArray());
            CvMat pointCounts = new CvMat(ImageNum, 1, MatrixType.S32C1, pointCountsValue);

            CvMat intrinsic = new CvMat(3, 3, MatrixType.F64C1);
            CvMat distortion = new CvMat(1, 4, MatrixType.F64C1);
            CvMat rotation = new CvMat(ImageNum, 3, MatrixType.F64C1);
            CvMat translation = new CvMat(ImageNum, 3, MatrixType.F64C1);

            Cv.CalibrateCamera2(objectPoints, imagePoints, pointCounts, srcImg[0].Size, intrinsic, distortion, rotation, translation, CalibrationFlag.Default);

            CvMat subImagePoints, subObjectPoints;
            Cv.GetRows(imagePoints, out subImagePoints, 0, PatSize);
            Cv.GetRows(objectPoints, out subObjectPoints, 0, PatSize);
            CvMat rotation_ = new CvMat(1, 3, MatrixType.F32C1);
            CvMat translation_ = new CvMat(1, 3, MatrixType.F32C1);

            Cv.FindExtrinsicCameraParams2(subObjectPoints, subImagePoints, intrinsic, distortion, rotation_, translation_, false);
            //Cv.FindExtrinsicCameraParams2_(subObjectPoints, subImagePoints, intrinsic, distortion, rotation_, translation_, false);

            // Free IplImages
            foreach (IplImage img in srcImg)
                img.Dispose();

            // Construct and return camera calibration data
            CameraCalibrationData camCal = new CameraCalibrationData();
            camCal.IsValid = foundNum > (ImageNum / 2); // Consider valid if at least half were successes
            camCal.Intrinsic = intrinsic;
            camCal.Translation = translation_;
            camCal.Rotation = rotation_;
            camCal.Distortion = distortion;

            return camCal;
        }
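OpenCVUtil.IplImageFromBitmap is a project-specific helper not shown above. One way it could be implemented is via OpenCvSharp.Extensions.BitmapConverter, assuming that assembly is referenced (the actual helper may do its own pixel copy):

        // Hypothetical implementation of OpenCVUtil.IplImageFromBitmap
        public static IplImage IplImageFromBitmap(System.Drawing.Bitmap bitmap)
        {
            // Allocates a new IplImage and copies the Bitmap pixels into it
            return OpenCvSharp.Extensions.BitmapConverter.ToIplImage(bitmap);
        }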
Example #14
        public CalibrateCamera()
        {
            const int ImageNum = 3; 
            const int PatRow = 7; 
            const int PatCol = 10; 
            const int PatSize = PatRow * PatCol;
            const int AllPoints = ImageNum * PatSize;
            const float ChessSize = 24.0f;            

            IplImage[] srcImg = new IplImage[ImageNum];
            for (int i = 0; i < ImageNum; i++)
            {
                srcImg[i] = new IplImage(string.Format(FilePath.Image.Calibration, i), LoadMode.Color);
            }

            CvPoint3D32f[,,] objects = new CvPoint3D32f[ImageNum, PatRow, PatCol];
            for (int i = 0; i < ImageNum; i++)
            {
                for (int j = 0; j < PatRow; j++)
                {
                    for (int k = 0; k < PatCol; k++)
                    {
                        objects[i, j, k] = new CvPoint3D32f
                        {
                            X = j * ChessSize,
                            Y = k * ChessSize,
                            Z = 0.0f
                        };
                    }
                }
            }
            CvMat objectPoints = new CvMat(AllPoints, 3, MatrixType.F32C1, objects);

            CvSize patternSize = new CvSize(PatCol, PatRow);

            int foundNum = 0;
            List<CvPoint2D32f> allCorners = new List<CvPoint2D32f>(AllPoints);
            int[] pointCountsValue = new int[ImageNum];
            using (CvWindow window = new CvWindow("Calibration", WindowMode.AutoSize))
            {
                for (int i = 0; i < ImageNum; i++)
                {
                    CvPoint2D32f[] corners;
                    bool found = Cv.FindChessboardCorners(srcImg[i], patternSize, out corners);
                    Debug.Print("{0:D2}...", i);
                    if (found)
                    {
                        Debug.Print("ok");
                        foundNum++;
                    }
                    else
                    {
                        Debug.Print("fail");
                    }

                    using (IplImage srcGray = new IplImage(srcImg[i].Size, BitDepth.U8, 1))
                    {
                        Cv.CvtColor(srcImg[i], srcGray, ColorConversion.BgrToGray);
                        Cv.FindCornerSubPix(srcGray, corners, corners.Length, new CvSize(3, 3), new CvSize(-1, -1), new CvTermCriteria(20, 0.03));
                        Cv.DrawChessboardCorners(srcImg[i], patternSize, corners, found);
                        pointCountsValue[i] = corners.Length;

                        window.ShowImage(srcImg[i]);
                        Cv.WaitKey(0);
                    }
                    allCorners.AddRange(corners);
                }
                if (foundNum != ImageNum)
                {
                    Debug.Assert(false);
                }
            }
 
            CvMat imagePoints = new CvMat(AllPoints, 1, MatrixType.F32C2, allCorners.ToArray());
            CvMat pointCounts = new CvMat(ImageNum, 1, MatrixType.S32C1, pointCountsValue);

            CvMat intrinsic = new CvMat(3, 3, MatrixType.F64C1);
            CvMat distortion = new CvMat(1, 4, MatrixType.F64C1);
            CvMat rotation = new CvMat(ImageNum, 3, MatrixType.F64C1);
            CvMat translation = new CvMat(ImageNum, 3, MatrixType.F64C1);

            Cv.CalibrateCamera2(objectPoints, imagePoints, pointCounts, srcImg[0].Size, intrinsic, distortion, rotation, translation, CalibrationFlag.Default);

            CvMat subImagePoints, subObjectPoints;
            Cv.GetRows(imagePoints, out subImagePoints, 0, PatSize);
            Cv.GetRows(objectPoints, out subObjectPoints, 0, PatSize);
            CvMat rotation_ = new CvMat(1, 3, MatrixType.F32C1);
            CvMat translation_ = new CvMat(1, 3, MatrixType.F32C1);

            Cv.FindExtrinsicCameraParams2(subObjectPoints, subImagePoints, intrinsic, distortion, rotation_, translation_, false);
            //Cv.FindExtrinsicCameraParams2_(subObjectPoints, subImagePoints, intrinsic, distortion, rotation_, translation_, false);

            using (var fs = new CvFileStorage("camera.xml", null, OpenCvSharp.FileStorageMode.Write))
            {
                fs.Write("intrinsic", intrinsic);
                fs.Write("rotation", rotation_);
                fs.Write("translation", translation_);
                fs.Write("distortion", distortion);
            }

            foreach (IplImage img in srcImg)
            {
                img.Dispose();
            }

            // Display the file that was written
            Console.WriteLine(File.ReadAllText("camera.xml"));
            Console.Read();
        }