Esempio n. 1
0
        public DistTransform()
        {
            // cvDistTransform
            // Performs a distance transform on the input image, then
            // normalizes the result to 0-255 for on-screen visualization.

            // (1) Load the source image as 8-bit grayscale
            using (var src = new IplImage(Const.ImageLenna, LoadMode.GrayScale))
            {
                if (src.Depth != BitDepth.U8)
                {
                    throw new Exception("Invalid Depth");
                }
                // (2) Allocate the float distance image and its 8-bit display copy
                using (var dst = new IplImage(src.Size, BitDepth.F32, 1))
                using (var dstNorm = new IplImage(src.Size, BitDepth.U8, 1))
                {
                    // (3) Compute the distance image and rescale it to 0-255
                    Cv.DistTransform(src, dst, DistanceType.L2, 3, null, null);
                    Cv.Normalize(dst, dstNorm, 0.0, 255.0, NormType.MinMax, null);

                    // (4) Show source and distance images until a key is pressed
                    using (new CvWindow("Source", WindowMode.AutoSize, src))
                    using (new CvWindow("Distance Image", WindowMode.AutoSize, dstNorm))
                    {
                        CvWindow.WaitKey(0);
                    }
                }
            }
        }
Esempio n. 2
0
        public Text()
        {
            // cvInitFont, cvPutText
            // Initializes fonts and draws one sample text line per font variant.

            // All font faces; the Italic flag is removed because it is combined
            // with each face below rather than used on its own.
            List <FontFace> font_face = new List <FontFace>(
                (FontFace[])Enum.GetValues(typeof(FontFace))
                );

            font_face.Remove(FontFace.Italic);

            // (1) Allocate the canvas and clear it to black
            using (IplImage img = Cv.CreateImage(new CvSize(450, 600), BitDepth.U8, 3))
            {
                Cv.Zero(img);
                // (2) Initialize the font structures: for each face, the plain
                // variant at even indices and the italic variant at odd indices
                CvFont[] font = new CvFont[font_face.Count * 2];
                for (int i = 0; i < font.Length; i += 2)
                {
                    font[i]     = new CvFont(font_face[i / 2], 1.0, 1.0);
                    font[i + 1] = new CvFont(font_face[i / 2] | FontFace.Italic, 1.0, 1.0);
                }
                // (3) Draw one line of sample text per font, each in a random color
                for (int i = 0; i < font.Length; i++)
                {
                    CvColor rcolor = CvColor.Random();
                    Cv.PutText(img, "OpenCV sample code", new CvPoint(15, (i + 1) * 30), font[i], rcolor);
                }
                // (4) Show the image; exit when a key is pressed
                using (CvWindow w = new CvWindow(img))
                {
                    CvWindow.WaitKey(0);
                }
            }
        }
Esempio n. 3
0
        public HoughCircles()
        {
            // cvHoughCircles
            // Detects circles with the Hough gradient method and draws them
            // in red on a copy of the source image.
            using (IplImage imgSrc = new IplImage(FilePath.Image.Walkman, LoadMode.Color))
                using (IplImage imgGray = new IplImage(imgSrc.Size, BitDepth.U8, 1))
                    using (IplImage imgHough = imgSrc.Clone())
                    {
                        // Grayscale + Gaussian blur (9x9) to reduce false detections
                        Cv.CvtColor(imgSrc, imgGray, ColorConversion.BgrToGray);
                        Cv.Smooth(imgGray, imgGray, SmoothType.Gaussian, 9);
                        //Cv.Canny(imgGray, imgGray, 75, 150, ApertureSize.Size3);

                        using (var storage = new CvMemStorage())
                        {
                            // dp=1, minDist=100, Canny threshold=150,
                            // accumulator threshold=55, no radius limits (0, 0)
                            CvSeq <CvCircleSegment> seq = imgGray.HoughCircles(storage, HoughCirclesMethod.Gradient, 1, 100, 150, 55, 0, 0);
                            foreach (CvCircleSegment item in seq)
                            {
                                imgHough.Circle(item.Center, (int)item.Radius, CvColor.Red, 3);
                            }
                        }

                        // Show the preprocessed image and the result until a key is pressed
                        using (new CvWindow("gray", WindowMode.AutoSize, imgGray))
                            using (new CvWindow("Hough circles", WindowMode.AutoSize, imgHough))
                            {
                                CvWindow.WaitKey(0);
                            }
                    }
        }
Esempio n. 4
0
        public EyeDetect()
        {
            // cvHaarDetectObjects
            // Detects eyes in live camera frames with a Haar cascade and draws
            // a colored circle around each detection.

            CvColor[] colors = new CvColor[] {
                new CvColor(0, 0, 255),
                new CvColor(0, 128, 255),
                new CvColor(0, 255, 255),
                new CvColor(0, 255, 0),
                new CvColor(255, 128, 0),
                new CvColor(255, 255, 0),
                new CvColor(255, 0, 0),
                new CvColor(255, 0, 255),
            };

            const double Scale        = 1.25;   // detection runs on a 1/Scale downscaled frame
            const double ScaleFactor  = 2.5;    // cascade image-pyramid scale step
            const int    MinNeighbors = 2;      // min overlapping detections to accept

            // Load the cascade ONCE, outside the capture loop: the original
            // re-read the XML file from disk for every single frame.
            using (CvHaarClassifierCascade cascade = CvHaarClassifierCascade.FromFile("C:\\Program Files\\OpenCV\\data\\haarcascades\\haarcascade_eye.xml"))
            using (CvCapture cap = CvCapture.FromCamera(1))
            using (CvWindow w = new CvWindow("Eye Tracker"))
            {
                while (CvWindow.WaitKey(10) < 0)
                {
                    using (IplImage img = cap.QueryFrame())
                    using (IplImage smallImg = new IplImage(new CvSize(Cv.Round(img.Width / Scale), Cv.Round(img.Height / Scale)), BitDepth.U8, 1))
                    {
                        // Grayscale, shrink and equalize the frame for detection
                        using (IplImage gray = new IplImage(img.Size, BitDepth.U8, 1))
                        {
                            Cv.CvtColor(img, gray, ColorConversion.BgrToGray);
                            Cv.Resize(gray, smallImg, Interpolation.Linear);
                            Cv.EqualizeHist(smallImg, smallImg);
                        }

                        // A fresh storage per frame; no Clear() needed on a new one
                        using (CvMemStorage storage = new CvMemStorage())
                        {
                            Stopwatch         watch = Stopwatch.StartNew();
                            CvSeq <CvAvgComp> eyes  = Cv.HaarDetectObjects(smallImg, cascade, storage, ScaleFactor, MinNeighbors, 0, new CvSize(30, 30));
                            watch.Stop();
                            //Console.WriteLine("detection time = {0}msn", watch.ElapsedMilliseconds);

                            for (int i = 0; i < eyes.Total; i++)
                            {
                                // Map the detection rectangle back to full-resolution
                                // coordinates and circle its center
                                CvRect  r      = eyes[i].Value.Rect;
                                CvPoint center = new CvPoint
                                {
                                    X = Cv.Round((r.X + r.Width * 0.5) * Scale),
                                    Y = Cv.Round((r.Y + r.Height * 0.5) * Scale)
                                };
                                int radius = Cv.Round((r.Width + r.Height) * 0.25 * Scale);
                                img.Circle(center, radius, colors[i % colors.Length], 3, LineType.AntiAlias, 0);
                            }
                        }

                        w.Image = img;
                    }
                }
            }
        }
Esempio n. 5
0
        public DistTransform()
        {
            // cvDistTransform
            // Distance-transforms a grayscale image and shows the result
            // normalized to the displayable 0-255 range.
            using (IplImage source = new IplImage(FilePath.Image.Lenna, LoadMode.GrayScale))
            {
                // The distance transform requires an 8-bit input
                if (source.Depth != BitDepth.U8)
                {
                    throw new Exception("Invalid Depth");
                }

                using (IplImage distance = new IplImage(source.Size, BitDepth.F32, 1))
                using (IplImage display = new IplImage(source.Size, BitDepth.U8, 1))
                {
                    Cv.DistTransform(source, distance, DistanceType.L2, 3, null, null);
                    Cv.Normalize(distance, display, 0.0, 255.0, NormType.MinMax, null);

                    using (new CvWindow("Source", WindowMode.AutoSize, source))
                    using (new CvWindow("Distance Image", WindowMode.AutoSize, display))
                    {
                        CvWindow.WaitKey(0);
                    }
                }
            }
        }
Esempio n. 6
0
        public Undistort()
        {
            // cvUndistort2
            // Corrects lens distortion using calibration data read from a file.
            using (IplImage srcImg = new IplImage(FilePath.Image.Distortion, LoadMode.Color))
            using (IplImage dstImg = srcImg.Clone())
            {
                // Read camera intrinsics and distortion coefficients
                CvMat intrinsic, distortion;
                using (CvFileStorage fs = new CvFileStorage(FilePath.Text.Camera, null, FileStorageMode.Read))
                {
                    CvFileNode param = fs.GetFileNodeByName(null, "intrinsic");
                    intrinsic  = fs.Read <CvMat>(param);
                    param      = fs.GetFileNodeByName(null, "distortion");
                    distortion = fs.Read <CvMat>(param);
                }

                // Guarantee the matrices are released even if Undistort2 or a
                // window throws (the original leaked them on that path).
                using (intrinsic)
                using (distortion)
                {
                    Cv.Undistort2(srcImg, dstImg, intrinsic, distortion);

                    using (new CvWindow("Distortion", WindowMode.AutoSize, srcImg))
                    using (new CvWindow("Undistortion", WindowMode.AutoSize, dstImg))
                    {
                        CvWindow.WaitKey(0);
                    }
                }
            }
        }
Esempio n. 7
0
 public Snake()
 {
     // cvSnakeImage
     // Fits an active contour (snake) to the image: starts from a 100-point
     // ellipse around the image center and runs one iteration per key press.
     using (var src = new IplImage(FilePath.Image.Cake, LoadMode.GrayScale))
         using (var dst = new IplImage(src.Size, BitDepth.U8, 3))
         {
             // Initial contour: ellipse centered on the image with radii
             // (width/2, height/2)
             CvPoint[] contour = new CvPoint[100];
             CvPoint   center  = new CvPoint(src.Width / 2, src.Height / 2);
             for (int i = 0; i < contour.Length; i++)
             {
                 contour[i].X = (int)(center.X * Math.Cos(2 * Math.PI * i / contour.Length) + center.X);
                 contour[i].Y = (int)(center.Y * Math.Sin(2 * Math.PI * i / contour.Length) + center.Y);
             }
             Console.WriteLine("Press any key to snake\nEsc - quit");
             using (var window = new CvWindow())
             {
                 while (true)
                 {
                     // One snake iteration (alpha=0.45, beta=0.35, gamma=0.2,
                     // 15x15 search window, 1-iteration termination criteria)
                     src.SnakeImage(contour, 0.45f, 0.35f, 0.2f, new CvSize(15, 15), new CvTermCriteria(1), true);
                     src.CvtColor(dst, ColorConversion.GrayToRgb);
                     // Draw the closed contour
                     for (int i = 0; i < contour.Length - 1; i++)
                     {
                         dst.Line(contour[i], contour[i + 1], new CvColor(255, 0, 0), 2);
                     }
                     dst.Line(contour[contour.Length - 1], contour[0], new CvColor(255, 0, 0), 2);
                     window.Image = dst;
                     int key = CvWindow.WaitKey();
                     if (key == 27)
                     {
                         // Esc quits
                         break;
                     }
                 }
             }
         }
 }
Esempio n. 8
0
        public VideoWriter()
        {
            // cvVideoWriter sample: capture camera frames, stamp a frame counter
            // on each one, write them to "cap.avi" and preview them in a window.

            // (1) Open a capture on the default camera
            using (CvCapture capture = CvCapture.FromCamera(0))
            {
                // (2) Query the capture size (depends on the camera in use)
                int    width  = capture.FrameWidth;
                int    height = capture.FrameHeight;
                double fps    = 15;//capture.Fps;
                // (3) Create the video writer (FourCC.Prompt asks for a codec)
                using (CvVideoWriter writer = new CvVideoWriter("cap.avi", FourCC.Prompt, fps, new CvSize(width, height)))
                using (CvFont font = new CvFont(FontFace.HersheyComplex, 0.7, 0.7))
                using (CvWindow window = new CvWindow("Capture", WindowMode.AutoSize))
                {
                    // (4) Grab frames, annotate and persist them until Esc
                    for (int frames = 0; ; frames++)
                    {
                        IplImage frame = capture.QueryFrame();
                        if (frame == null)
                        {
                            // Camera disconnected or stream ended: the original
                            // dereferenced null here and crashed.
                            break;
                        }
                        string str = string.Format("{0}[frame]", frames);
                        frame.PutText(str, new CvPoint(10, 20), font, new CvColor(0, 255, 100));
                        writer.WriteFrame(frame);
                        window.ShowImage(frame);

                        // Pace the loop to the target frame rate; Esc (0x1b) quits
                        int key = CvWindow.WaitKey((int)(1000 / fps));
                        if (key == '\x1b')
                        {
                            break;
                        }
                    }
                }
            }
        }
        public void Process(CvCapture capture)
        {
            // Pulls frames from the capture and dispatches every
            // (FrameSkipValue+1)-th frame to an async handler until the stream
            // ends or Esc is pressed.
            // NOTE(review): FrameCounter, FrameSkipValue and the ProcessIplImage
            // delegate type are declared elsewhere in this class; the overload
            // of Process passed to the delegate presumably takes an IplImage —
            // confirm against the rest of the file.
            FrameCounter = 0;
            while (true)
            {
                FrameCounter++;
                var frame = capture.QueryFrame();
                if (frame == null)
                {
                    // End of stream / camera gone
                    break;
                }

                if (FrameCounter > FrameSkipValue)
                {
                    // Fire-and-forget processing on a thread-pool thread
                    var method = new ProcessIplImage(Process);
                    method.BeginInvoke(frame, null, null);
                    FrameCounter = 0;
                }

                // ~30 fps pacing; Esc (27) stops the loop
                var key = CvWindow.WaitKey(33);
                if (key == 27)
                {
                    break;
                }
            }
        }
Esempio n. 10
0
        public Undistort()
        {
            // cvUndistort2
            // Corrects lens distortion using previously saved calibration data.

            // (1) Load the image to be corrected
            using (IplImage srcImg = new IplImage(Const.ImageDistortion, LoadMode.Color))
            using (IplImage dstImg = srcImg.Clone())
            {
                // (2) Read the camera parameter file
                CvMat intrinsic, distortion;
                using (CvFileStorage fs = new CvFileStorage(Const.XmlCamera, null, FileStorageMode.Read))
                {
                    CvFileNode param = fs.GetFileNodeByName(null, "intrinsic");
                    intrinsic  = fs.Read <CvMat>(param);
                    param      = fs.GetFileNodeByName(null, "distortion");
                    distortion = fs.Read <CvMat>(param);
                }

                // Guarantee the matrices are released even if an exception is
                // thrown below (the original leaked them on that path).
                using (intrinsic)
                using (distortion)
                {
                    // (3) Undistort
                    Cv.Undistort2(srcImg, dstImg, intrinsic, distortion);

                    // (4) Show both images; exit when a key is pressed
                    using (CvWindow w1 = new CvWindow("Distortion", WindowMode.AutoSize, srcImg))
                    using (CvWindow w2 = new CvWindow("Undistortion", WindowMode.AutoSize, dstImg))
                    {
                        CvWindow.WaitKey(0);
                    }
                }
            }
        }
Esempio n. 11
0
        public Contour()
        {
            // cvContourArea, cvArcLength
            // Computes the area enclosed by a contour and the contour's length,
            // and compares them against the bounding rectangle's.

            const int SIZE = 500;

            // (1) Allocate and clear the canvas
            using (CvMemStorage storage = new CvMemStorage())
                using (IplImage img = new IplImage(SIZE, SIZE, BitDepth.U8, 3))
                {
                    img.Zero();
                    // (2) Generate a random star-shaped closed polyline of 20 points
                    CvSeq <CvPoint> points = new CvSeq <CvPoint>(SeqType.PolyLine, storage);
                    CvRNG           rng    = new CvRNG((ulong)DateTime.Now.Ticks);
                    double          scale  = rng.RandReal() + 0.5;
                    CvPoint         pt0    = new CvPoint
                    {
                        X = (int)(Math.Cos(0) * SIZE / 4 * scale + SIZE / 2),
                        Y = (int)(Math.Sin(0) * SIZE / 4 * scale + SIZE / 2)
                    };
                    img.Circle(pt0, 2, CvColor.Green);
                    points.Push(pt0);
                    for (int i = 1; i < 20; i++)
                    {
                        // Each vertex sits at angle i*2π/20 with a randomized radius
                        scale = rng.RandReal() + 0.5;
                        CvPoint pt1 = new CvPoint
                        {
                            X = (int)(Math.Cos(i * 2 * Math.PI / 20) * SIZE / 4 * scale + SIZE / 2),
                            Y = (int)(Math.Sin(i * 2 * Math.PI / 20) * SIZE / 4 * scale + SIZE / 2)
                        };
                        img.Line(pt0, pt1, CvColor.Green, 2);
                        pt0.X = pt1.X;
                        pt0.Y = pt1.Y;
                        img.Circle(pt0, 3, CvColor.Green, Cv.FILLED);
                        points.Push(pt0);
                    }
                    // Close the polyline back to the first vertex
                    img.Line(pt0, points.GetSeqElem(0).Value, CvColor.Green, 2);
                    // (3) Compute bounding rectangle, enclosed area and perimeter
                    CvRect rect   = points.BoundingRect(false);
                    double area   = points.ContourArea();
                    double length = points.ArcLength(CvSlice.WholeSeq, 1);
                    // (4) Draw the results onto the image
                    img.Rectangle(new CvPoint(rect.X, rect.Y), new CvPoint(rect.X + rect.Width, rect.Y + rect.Height), CvColor.Red, 2);
                    string text_area   = string.Format("Area:   wrect={0}, contour={1}", rect.Width * rect.Height, area);
                    string text_length = string.Format("Length: rect={0}, contour={1}", 2 * (rect.Width + rect.Height), length);
                    using (CvFont font = new CvFont(FontFace.HersheySimplex, 0.7, 0.7, 0, 1, LineType.AntiAlias))
                    {
                        img.PutText(text_area, new CvPoint(10, img.Height - 30), font, CvColor.White);
                        img.PutText(text_length, new CvPoint(10, img.Height - 10), font, CvColor.White);
                    }
                    // (5) Show the image; exit when a key is pressed
                    using (CvWindow window = new CvWindow("BoundingRect", WindowMode.AutoSize))
                    {
                        window.Image = img;
                        CvWindow.WaitKey(0);
                    }
                }
        }
Esempio n. 12
0
 // shows a live view of the current web cam
 private static void Live(CvCapture cap, CvWindow winScr)
 {
     // Keep grabbing frames into the window until Esc (27) is pressed.
     for (; CvWindow.WaitKey(10) != 27;)
     {
         winScr.Image = cap.QueryFrame();
     }
 }
Esempio n. 13
0
        public Watershed()
        {
            // cvWatershed
            // The user clicks to place circular markers (seed regions); the
            // watershed transform then grows each marker along the image
            // gradient, and the boundaries that form at high-gradient areas
            // split the image into as many regions as markers were placed.

            // (2) Load the image, clear the marker image, allocate result images
            using (IplImage srcImg = new IplImage(Const.ImageGoryokaku, LoadMode.AnyDepth | LoadMode.AnyColor))
                using (IplImage dstImg = srcImg.Clone())
                    using (IplImage dspImg = srcImg.Clone())
                        using (IplImage markers = new IplImage(srcImg.Size, BitDepth.S32, 1))
                        {
                            markers.Zero();

                            // (3) Show the input image and register a mouse handler
                            //     that lets the user place seed components
                            using (CvWindow wImage = new CvWindow("image", WindowMode.AutoSize))
                            {
                                wImage.Image = srcImg;
                                // Each left click paints a filled circle with a new
                                // seed id into the marker image (and a white outline
                                // on the display image for feedback)
                                int seedNum = 0;
                                wImage.OnMouseCallback += delegate(MouseEvent ev, int x, int y, MouseEvent flags)
                                {
                                    if (ev == MouseEvent.LButtonDown)
                                    {
                                        seedNum++;
                                        CvPoint pt = new CvPoint(x, y);
                                        markers.Circle(pt, 20, CvScalar.ScalarAll(seedNum), Cv.FILLED, LineType.Link8, 0);
                                        dspImg.Circle(pt, 20, CvColor.White, 3, LineType.Link8, 0);
                                        wImage.Image = dspImg;
                                    }
                                };
                                CvWindow.WaitKey();
                            }

                            // (4) Run the watershed segmentation
                            Cv.Watershed(srcImg, markers);

                            // (5) Paint the watershed boundaries (pixel value -1) in red
                            //     on the result image
                            for (int i = 0; i < markers.Height; i++)
                            {
                                for (int j = 0; j < markers.Width; j++)
                                {
                                    int idx = (int)(markers.Get2D(i, j).Val0);
                                    if (idx == -1)
                                    {
                                        dstImg.Set2D(i, j, CvColor.Red);
                                    }
                                }
                            }
                            using (CvWindow wDst = new CvWindow("watershed transform", WindowMode.AutoSize))
                            {
                                wDst.Image = dstImg;
                                CvWindow.WaitKey();
                            }
                        }
        }
Esempio n. 14
0
 static void ShowImage(IplImage image)
 {
     // Display the image in a temporary window until any key is pressed.
     using (var window = new CvWindow("OpenCV Window"))
     {
         window.ShowImage(image);
         CvWindow.WaitKey(0);
     }
     //ShowImages(new IplImage[] { image });
 }
Esempio n. 15
0
        public IplImage InpaintImage(IplImage src)
        {
            // cvInpaint interactive sample.
            // Paint over defects with the left mouse button; Enter inpaints the
            // painted region (Navier-Stokes method), 'r' resets the mask and
            // preview, Esc finishes.
            // Returns the inpainted image (also kept in the `inpaint` field).
            inpaint = new IplImage(src.Size, BitDepth.U8, 3);

            // The original never disposed paint/mask/win_Paint; using-wrap them.
            using (IplImage paint = src.Clone())
            using (IplImage mask = new IplImage(src.Size, BitDepth.U8, 1))
            using (CvWindow win_Paint = new CvWindow("Paint", WindowMode.AutoSize, paint))
            {
                // A freshly created IplImage is NOT initialized: without this,
                // pressing Enter before 'r' fed garbage to cvInpaint.
                mask.SetZero();

                CvPoint prevPt = new CvPoint(-1, -1);

                win_Paint.OnMouseCallback += delegate(MouseEvent eve, int x, int y, MouseEvent flag)
                {
                    if (eve == MouseEvent.LButtonDown)
                    {
                        // Start of a stroke
                        prevPt = new CvPoint(x, y);
                    }
                    else if (eve == MouseEvent.LButtonUp || (flag & MouseEvent.FlagLButton) == 0)
                    {
                        // End of a stroke
                        prevPt = new CvPoint(-1, -1);
                    }
                    else if (eve == MouseEvent.MouseMove && (flag & MouseEvent.FlagLButton) != 0)
                    {
                        // Dragging: extend the stroke on both mask and preview
                        CvPoint pt = new CvPoint(x, y);

                        Cv.DrawLine(mask, prevPt, pt, CvColor.White, 5, LineType.AntiAlias, 0);
                        Cv.DrawLine(paint, prevPt, pt, CvColor.White, 5, LineType.AntiAlias, 0);
                        prevPt = pt;
                        win_Paint.ShowImage(paint);
                    }
                };

                bool repeat = true;

                while (repeat)
                {
                    switch (CvWindow.WaitKey(0))
                    {
                    case 'r':
                        // Reset mask and preview to the source image
                        mask.SetZero();
                        Cv.Copy(src, paint);
                        win_Paint.ShowImage(paint);
                        break;

                    case '\r':
                        // Run the inpainting and show the result
                        CvWindow win_Inpaint = new CvWindow("Inpainted", WindowMode.AutoSize);
                        Cv.Inpaint(paint, mask, inpaint, 3, InpaintMethod.NS);
                        win_Inpaint.ShowImage(inpaint);
                        break;

                    case (char)27:
                        // Esc: tear down all native windows and leave the loop
                        CvWindow.DestroyAllWindows();
                        repeat = false;
                        break;
                    }
                }
            }
            return(inpaint);
        }
Esempio n. 16
0
 public CaptureCamera()
 {
     // Stream frames from the default camera into a window until any
     // key is pressed.
     using (var capture = CvCapture.FromCamera(0)) // device type + camera index
     using (var window = new CvWindow("SampleCapture"))
     {
         for (; CvWindow.WaitKey(10) < 0;)
         {
             window.Image = capture.QueryFrame();
         }
     }
 }
Esempio n. 17
0
 // use circle detection only
 private static void CircleOnly(CvCapture cap, CvWindow winScr)
 {
     // Continuously grabs frames, applies perspective correction and runs
     // circle detection until Esc (27) is pressed.
     // NOTE(review): srcImage, gray and blurKernelSize are static fields
     // declared elsewhere; gray and blurKernelSize are initialized here,
     // presumably for use inside FindCircle — confirm against the rest of
     // the class.
     srcImage       = PerspectiveCorretoin.GetCorrectedImage(cap.QueryFrame());
     gray           = new IplImage(srcImage.Size, BitDepth.U8, 1);
     blurKernelSize = new Size(9, 9);
     while (CvWindow.WaitKey(10) != 27)
     {
         srcImage = PerspectiveCorretoin.GetCorrectedImage(cap.QueryFrame());
         ShowFPS();
         FindCircle(srcImage, winScr);
     }
 }
Esempio n. 18
0
        public Watershed()
        {
            // cvWatershed
            // Click to place circular seed markers, then split the image into
            // one region per marker with the watershed transform and paint the
            // resulting boundaries in red.
            using (var srcImg = new IplImage(FilePath.Image.Goryokaku, LoadMode.AnyDepth | LoadMode.AnyColor))
                using (var dstImg = srcImg.Clone())
                    using (var dspImg = srcImg.Clone())
                        using (var markers = new IplImage(srcImg.Size, BitDepth.S32, 1))
                        {
                            markers.Zero();

                            using (var window = new CvWindow("image", WindowMode.AutoSize))
                            {
                                window.Image = srcImg;
                                // Mouse event: each left click paints a filled circle
                                // with a fresh seed id into the marker image
                                int seedNum = 0;
                                window.OnMouseCallback += delegate(MouseEvent ev, int x, int y, MouseEvent flags)
                                {
                                    if (ev == MouseEvent.LButtonDown)
                                    {
                                        seedNum++;
                                        CvPoint pt = new CvPoint(x, y);
                                        markers.Circle(pt, 20, CvScalar.ScalarAll(seedNum), Cv.FILLED, LineType.Link8, 0);
                                        dspImg.Circle(pt, 20, CvColor.White, 3, LineType.Link8, 0);
                                        window.Image = dspImg;
                                    }
                                };
                                CvWindow.WaitKey();
                            }

                            Cv.Watershed(srcImg, markers);

                            // draws watershed boundaries (pixel value -1) in red
                            for (int i = 0; i < markers.Height; i++)
                            {
                                for (int j = 0; j < markers.Width; j++)
                                {
                                    int idx = (int)(markers.Get2D(i, j).Val0);
                                    if (idx == -1)
                                    {
                                        dstImg.Set2D(i, j, CvColor.Red);
                                    }
                                }
                            }
                            using (CvWindow wDst = new CvWindow("watershed transform", WindowMode.AutoSize))
                            {
                                wDst.Image = dstImg;
                                CvWindow.WaitKey();
                            }
                        }
        }
Esempio n. 19
0
        public BoundingRect()
        {
            // cvBoundingRect
            // Computes the rectangle bounding a set of random points.

            // (1) Allocate and initialize the image and memory storage
            //     (the storage is only needed for the CvSeq variant below)
            using (IplImage img = new IplImage(640, 480, BitDepth.U8, 3))
                using (CvMemStorage storage = new CvMemStorage(0))
                {
                    img.Zero();
                    CvRNG rng = new CvRNG(DateTime.Now);
                    // (2) Generate random points in the central half of the image
                    ///*
                    // Simple approach (plain array)
                    CvPoint[] points = new CvPoint[50];
                    for (int i = 0; i < 50; i++)
                    {
                        points[i] = new CvPoint()
                        {
                            X = (int)(rng.RandInt() % (img.Width / 2) + img.Width / 4),
                            Y = (int)(rng.RandInt() % (img.Height / 2) + img.Height / 4)
                        };
                        img.Circle(points[i], 3, new CvColor(0, 255, 0), Cv.FILLED);
                    }
                    //*/

                    /*
                     * // Variant following the original sample (uses CvSeq)
                     * CvSeq points = new CvSeq(SeqType.EltypePoint, CvSeq.SizeOf, CvPoint.SizeOf, storage);
                     * for (int i = 0; i < 50; i++) {
                     *  CvPoint pt = new CvPoint();
                     *  pt.X = (int)(rng.RandInt() % (img.Width / 2) + img.Width / 4);
                     *  pt.Y = (int)(rng.RandInt() % (img.Height / 2) + img.Height / 4);
                     *  points.Push(pt);
                     *  img.Circle(pt, 3, new CvColor(0, 255, 0), Cv.FILLED);
                     * }
                     * //*/
                    // (3) Compute and draw the bounding rectangle of the points
                    CvRect rect = Cv.BoundingRect(points);
                    img.Rectangle(new CvPoint(rect.X, rect.Y), new CvPoint(rect.X + rect.Width, rect.Y + rect.Height), new CvColor(255, 0, 0), 2);
                    // (4) Show the image; exit when a key is pressed
                    using (CvWindow w = new CvWindow("BoundingRect", WindowMode.AutoSize, img))
                    {
                        CvWindow.WaitKey(0);
                    }
                }
        }
Esempio n. 20
0
        static void LeafBorders()
        {
            // Shows the source leaf image and the result of GetLeafBorders
            // side by side until a key is pressed.
            using (CvWindow win2 = new CvWindow("OpenCv Window2"))
                using (CvWindow win = new CvWindow("OpenCv Window"))
                {
                    using (IplImage src = Cv.LoadImage("Resources/maple_leaf.jpg", LoadMode.Color))
                    {
                        //dst.FindContours()

                        win2.ShowImage(src);
                        win.ShowImage(GetLeafBorders(src));
                    }
                    CvWindow.WaitKey();
                    // Free the image produced by GetLeafBorders above — it is
                    // not wrapped in a using, so it is released via the window.
                    win.Image.Dispose();
                }
        }
Esempio n. 21
0
        /// <summary>
        /// Sample of the new C++ style wrapper: runs the standard and the
        /// probabilistic Hough line transforms on Canny edges and shows both.
        /// </summary>
        private void SampleCpp()
        {
            // (1) Load the image
            using (Mat imgGray = new Mat(FilePath.Image.Goryokaku, LoadMode.GrayScale))
                using (Mat imgStd = new Mat(FilePath.Image.Goryokaku, LoadMode.Color))
                    using (Mat imgProb = imgStd.Clone())
                    {
                        // Preprocess: Canny edges feed both Hough transforms
                        Cv2.Canny(imgGray, imgGray, 50, 200, 3, false);

                        // (3) Run Standard Hough Transform (draw at most 10 lines)
                        CvLineSegmentPolar[] segStd = Cv2.HoughLines(imgGray, 1, Math.PI / 180, 50, 0, 0);
                        int limit = Math.Min(segStd.Length, 10);
                        for (int i = 0; i < limit; i++)
                        {
                            // Convert (rho, theta) into two far-apart points on
                            // the detected line and draw it
                            float  rho   = segStd[i].Rho;
                            float  theta = segStd[i].Theta;
                            double a     = Math.Cos(theta);
                            double b     = Math.Sin(theta);
                            double x0    = a * rho;
                            double y0    = b * rho;
                            Point  pt1   = new Point {
                                X = Cv.Round(x0 + 1000 * (-b)), Y = Cv.Round(y0 + 1000 * (a))
                            };
                            Point pt2 = new Point {
                                X = Cv.Round(x0 - 1000 * (-b)), Y = Cv.Round(y0 - 1000 * (a))
                            };
                            imgStd.Line(pt1, pt2, Scalar.Red, 3, LineType.AntiAlias, 0);
                        }

                        // (4) Run Probabilistic Hough Transform (returns segments directly)
                        CvLineSegmentPoint[] segProb = Cv2.HoughLinesP(imgGray, 1, Math.PI / 180, 50, 50, 10);
                        foreach (CvLineSegmentPoint s in segProb)
                        {
                            imgProb.Line(s.P1, s.P2, CvColor.Red, 3, LineType.AntiAlias, 0);
                        }

                        // (5) Show results
                        using (new Window("Hough_line_standard", WindowMode.AutoSize, imgStd))
                            using (new Window("Hough_line_probabilistic", WindowMode.AutoSize, imgProb))
                            {
                                CvWindow.WaitKey(0);
                            }
                    }
        }
Esempio n. 22
0
        public Delaunay()
        {
            // cvSubdiv2D sample: incrementally inserts random points into a
            // Delaunay subdivision, animating the triangulation and its Voronoi
            // diagram, then paints the final Voronoi tessellation.
            // NOTE(review): LocatePoint, DrawSubdiv and PaintVoronoi are helper
            // methods defined elsewhere in this class.
            CvRect  rect             = new CvRect(0, 0, 600, 600);
            CvColor activeFacetColor = new CvColor(255, 0, 0);
            CvColor delaunayColor    = new CvColor(0, 0, 0);
            CvColor voronoiColor     = new CvColor(0, 180, 0);
            CvColor bkgndColor       = new CvColor(255, 255, 255);
            Random  rand             = new Random();

            using (CvMemStorage storage = new CvMemStorage(0))
                using (IplImage img = new IplImage(rect.Size, BitDepth.U8, 3))
                    using (CvWindow window = new CvWindow("delaunay"))
                    {
                        img.Set(bkgndColor);
                        CvSubdiv2D subdiv = new CvSubdiv2D(rect, storage);
                        for (int i = 0; i < 200; i++)
                        {
                            // Random point inside the subdivision rectangle
                            CvPoint2D32f fp = new CvPoint2D32f
                            {
                                X = (float)rand.Next(5, rect.Width - 10),
                                Y = (float)rand.Next(5, rect.Height - 10)
                            };
                            // Highlight the facet the new point falls into
                            LocatePoint(subdiv, fp, img, activeFacetColor);
                            window.Image = img;

                            // Any key press aborts the animation early
                            if (CvWindow.WaitKey(100) >= 0)
                            {
                                break;
                            }
                            // Insert the point, recompute Voronoi, redraw everything
                            subdiv.Insert(fp);
                            subdiv.CalcVoronoi2D();
                            img.Set(bkgndColor);
                            DrawSubdiv(img, subdiv, delaunayColor, voronoiColor);
                            window.Image = img;
                            if (CvWindow.WaitKey(100) >= 0)
                            {
                                break;
                            }
                        }
                        // Final frame: filled Voronoi tessellation
                        img.Set(bkgndColor);
                        PaintVoronoi(subdiv, img);
                        window.Image = img;

                        CvWindow.WaitKey(0);
                    }
        }
Esempio n. 23
0
        public PyrSegmentation()
        {
            // cvPyrSegmentation
            // Builds an image pyramid with the given number of levels and uses
            // it to segment the image; runs with levels 1 through 4 so the
            // results can be compared side by side.

            const double threshold1 = 255.0;   // error threshold for establishing links
            const double threshold2 = 50.0;    // error threshold for segment clustering

            // (1) Load the source image
            using (IplImage srcImg = new IplImage(Const.ImageGoryokaku, LoadMode.AnyDepth | LoadMode.AnyColor))
            {
                // Run the segmentation for pyramid levels 1 through 4
                IplImage[] dstImg = new IplImage[4];
                for (int level = 0; level < dstImg.Length; level++)
                {
                    // (2) Set an ROI whose width/height are divisible by 2^(level+1),
                    //     as required by cvPyrSegmentation
                    CvRect roi = new CvRect()
                    {
                        X      = 0,
                        Y      = 0,
                        Width  = srcImg.Width & -(1 << (level + 1)),
                        Height = srcImg.Height & -(1 << (level + 1))
                    };
                    srcImg.ROI = roi;
                    // (3) Allocate the result image and run the segmentation
                    dstImg[level] = srcImg.Clone();
                    Cv.PyrSegmentation(srcImg, dstImg[level], level + 1, threshold1, threshold2);
                }

                // (4) Show the input image and every segmentation result
                CvWindow   wSrc = new CvWindow("src", srcImg);
                CvWindow[] wDst = new CvWindow[dstImg.Length];
                for (int i = 0; i < dstImg.Length; i++)
                {
                    wDst[i] = new CvWindow("dst" + i, dstImg[i]);
                }
                CvWindow.WaitKey();
                CvWindow.DestroyAllWindows();

                // BUGFIX: the CvWindow wrappers were never disposed;
                // DestroyAllWindows only closes the native HighGUI windows,
                // it does not release the managed wrapper resources.
                wSrc.Dispose();
                foreach (CvWindow w in wDst)
                {
                    w.Dispose();
                }

                foreach (IplImage item in dstImg)
                {
                    item.Dispose();
                }
            }
        }
Esempio n. 24
0
        static void CannyCamera()
        {
            // Grabs one frame from the default camera, converts it to
            // grayscale, runs the Canny edge detector in place, and shows
            // the result until a key is pressed.
            using (CvWindow win = new CvWindow("Canny"))
                using (CvCapture cap = new CvCapture(0))
                {
                    // BUGFIX: the IplImage returned by QueryFrame points to a
                    // buffer owned by the capture (cvQueryFrame semantics) and
                    // must NOT be disposed by the caller; the previous `using`
                    // released it and risked a double free.
                    IplImage frame = cap.QueryFrame();
                    using (IplImage dst = new IplImage(frame.Size, BitDepth.U8, 1))
                    {
                        frame.CvtColor(dst, ColorConversion.BgrToGray);

                        // In-place Canny: source and destination may be the
                        // same single-channel 8-bit image.
                        dst.Canny(dst, 50.0, 50.0, ApertureSize.Size3);

                        win.Image = dst;
                    }

                    CvWindow.WaitKey();
                }
        }
Esempio n. 25
0
        // execute a four point transform calibration
        public static void ApplyFourPointTransform(CvCapture cap, CvWindow win)
        {
            Console.WriteLine("******* Starting Calibration *******");
            CvPoint2D32f[] sPts = null;
            image = cap.QueryFrame();

            while (CvWindow.WaitKey(10) != 27)
            {
                image = cap.QueryFrame();
                sPts  = GetPoints(image, win);

                if (sPts != null)
                {
                    calibrationDone = true;
                    Console.WriteLine("********* Calibration DONE *********\n");
                    break;
                }
            }

            CvPoint tl, tr, br, bl;

            tl = sPts[0];
            tr = sPts[1];
            br = sPts[2];
            bl = sPts[3];

            double widthA   = Math.Sqrt((Math.Pow(br.X - bl.X, 2)) + (Math.Pow(br.Y - bl.Y, 2)));
            double widthB   = Math.Sqrt((Math.Pow(tr.X - tl.X, 2)) + (Math.Pow(tr.Y - tl.Y, 2)));
            int    maxWidth = Math.Max((int)widthA, (int)widthB);

            double heightA   = Math.Sqrt((Math.Pow(tr.X - br.X, 2)) + (Math.Pow(tr.Y - br.Y, 2)));
            double heightB   = Math.Sqrt((Math.Pow(tl.X - bl.X, 2)) + (Math.Pow(tl.Y - bl.Y, 2)));
            int    maxHeight = Math.Max((int)heightA, (int)heightB);

            CvPoint2D32f[] dPts = new CvPoint2D32f[4];
            dPts[0] = new CvPoint2D32f(0, 0);
            dPts[1] = new CvPoint2D32f(image.Width, 0);
            dPts[2] = new CvPoint2D32f(image.Width, image.Height);
            dPts[3] = new CvPoint2D32f(0, image.Height);

            correctionMatrix = Cv.GetPerspectiveTransform(sPts, dPts);
            Cv.WarpPerspective(image, image, correctionMatrix);
        }
Esempio n. 26
0
        // find circles/dots using blob detection
        private static void FindBlob(CvCapture cap, CvWindow winScr)
        {
            SimpleBlobDetector.Params blobParameters = new SimpleBlobDetector.Params();

            // threshold (gray value)
            blobParameters.MinThreshold = blobMinThreshold;
            blobParameters.MaxThreshold = blobMaxThreshold;
            // area (pixel count)
            blobParameters.FilterByArea = true;
            blobParameters.MinArea      = blobMinArea;
            blobParameters.MaxArea      = blobMaxArea;
            // circularity
            blobParameters.FilterByCircularity = true;
            blobParameters.MinCircularity      = blobMinCircularity;
            // convexity - probably not needed - maybe eleminates false positives
            blobParameters.FilterByConvexity = true;
            blobParameters.MinConvexity      = blobMinConvexity;
            //// inertia - what does the values mean exactly
            //blobParameters.FilterByInertia = true;
            //blobParameters.MinInertiaRatio =

            SimpleBlobDetector blobDetector = new SimpleBlobDetector(blobParameters);

            gray = new IplImage(cap.QueryFrame().Size, BitDepth.U8, 1);

            while (CvWindow.WaitKey(10) != 27)
            {
                IplImage iplImage = PerspectiveCorretoin.GetCorrectedImage(cap.QueryFrame());
                Cv.CvtColor(iplImage, gray, ColorConversion.RgbToGray);

                Mat mat = new Mat(gray);
                mat.PyrDown(new Size(mat.Width / 2, mat.Height / 2));

                KeyPoint[] keypoints = blobDetector.Detect(mat);

                foreach (KeyPoint item in keypoints)
                {
                    Cv.DrawCircle(gray, new CvPoint2D32f(item.Pt.X, item.Pt.Y), (int)(item.Size * 3), CvColor.Green);
                    Console.WriteLine("Found blob | size = " + item.Size);
                }
                winScr.Image = gray;
            }
        }
        public PyrMeanShiftFiltering()
        {
            // cvPyrMeanShiftFiltering
            // Segments the image via mean-shift filtering over an image pyramid.

            const int level = 2;

            // (1) Load the input image; the algorithm requires 8-bit, 3-channel data.
            using (IplImage srcImg = new IplImage(Const.ImageGoryokaku, LoadMode.AnyDepth | LoadMode.AnyColor))
            {
                if (srcImg.NChannels != 3 || srcImg.Depth != BitDepth.U8)
                {
                    throw new Exception();
                }

                // (2) Set an ROI whose width/height are divisible by 2^level.
                srcImg.ROI = new CvRect(0, 0,
                                        srcImg.Width & -(1 << level),
                                        srcImg.Height & -(1 << level));

                // (3) Clone the source for the output and run the segmentation.
                using (IplImage dstImg = srcImg.Clone())
                {
                    Cv.PyrMeanShiftFiltering(srcImg, dstImg, 30.0, 30.0, level, new CvTermCriteria(5, 1));

                    // (4) Show input and result until a key is pressed.
                    using (new CvWindow("Source", srcImg))
                    using (new CvWindow("MeanShift", dstImg))
                    {
                        CvWindow.WaitKey();
                    }
                }
            }
        }
Esempio n. 28
0
 public Threshold()
 {
     // Binarizes a grayscale, Gaussian-smoothed Lenna image; a trackbar
     // lets the user adjust the binarization threshold interactively.
     using (IplImage source = new IplImage(FilePath.Image.Lenna, LoadMode.Color))
     using (IplImage grayscale = new IplImage(source.Size, BitDepth.U8, 1))
     using (IplImage binary = new IplImage(source.Size, BitDepth.U8, 1))
     using (CvWindow window = new CvWindow("SampleThreshold"))
     {
         // Convert to gray and smooth to suppress noise before thresholding.
         source.CvtColor(grayscale, ColorConversion.BgrToGray);
         grayscale.Smooth(grayscale, SmoothType.Gaussian, 5);

         const int initialThreshold = 90;

         // Re-threshold and refresh the window whenever the slider moves.
         window.CreateTrackbar("threshold", initialThreshold, 255, delegate(int pos)
         {
             grayscale.Threshold(binary, pos, 255, ThresholdType.Binary);
             window.Image = binary;
         });

         // Show the initial result before the user touches the slider.
         grayscale.Threshold(binary, initialThreshold, 255, ThresholdType.Binary);
         window.Image = binary;
         CvWindow.WaitKey();
     }
 }
Esempio n. 29
0
        public PyrSegmentation()
        {
            // Builds an image pyramid with the given number of levels and uses
            // it to segment the image; runs with levels 1 through 4 so the
            // results can be compared side by side.

            const double threshold1 = 255.0;   // error threshold for establishing links
            const double threshold2 = 50.0;    // error threshold for segment clustering

            using (IplImage srcImg = new IplImage(FilePath.Image.Goryokaku, LoadMode.AnyDepth | LoadMode.AnyColor))
            {
                IplImage[] dstImg = new IplImage[4];
                for (int level = 0; level < dstImg.Length; level++)
                {
                    // ROI width/height must be divisible by 2^(level+1),
                    // as required by cvPyrSegmentation.
                    CvRect roi = new CvRect()
                    {
                        X      = 0,
                        Y      = 0,
                        Width  = srcImg.Width & -(1 << (level + 1)),
                        Height = srcImg.Height & -(1 << (level + 1))
                    };
                    srcImg.ROI = roi;

                    dstImg[level] = srcImg.Clone();
                    Cv.PyrSegmentation(srcImg, dstImg[level], level + 1, threshold1, threshold2);
                }

                // Show the input image and every segmentation result.
                CvWindow   wSrc = new CvWindow("src", srcImg);
                CvWindow[] wDst = new CvWindow[dstImg.Length];
                for (int i = 0; i < dstImg.Length; i++)
                {
                    wDst[i] = new CvWindow("dst" + i, dstImg[i]);
                }
                CvWindow.WaitKey();
                CvWindow.DestroyAllWindows();

                // BUGFIX: the CvWindow wrappers were never disposed;
                // DestroyAllWindows only closes the native HighGUI windows,
                // it does not release the managed wrapper resources.
                wSrc.Dispose();
                foreach (CvWindow w in wDst)
                {
                    w.Dispose();
                }

                foreach (IplImage item in dstImg)
                {
                    item.Dispose();
                }
            }
        }
Esempio n. 30
0
        public Template()
        {
            // Continuously grabs camera frames and draws a rectangle around
            // the best match of a template image, using the correlation
            // coefficient matching method. Stops when any key is pressed.
            using (CvCapture cap = CvCapture.FromCamera(1))
            using (CvWindow w = new CvWindow("Template Matching"))
            using (IplImage tpl = Cv.LoadImage("speedlimit55.jpg", LoadMode.Color))
            {
                CvPoint minloc, maxloc;

                double minval, maxval;

                while (CvWindow.WaitKey(10) < 0)
                {
                    // BUGFIX: the frame returned by QueryFrame is owned by the
                    // capture (cvQueryFrame semantics) and must NOT be released
                    // by the caller — the old Cv.ReleaseImage(img) risked a
                    // double free.
                    IplImage img = cap.QueryFrame();

                    // Result map is (W-w+1) x (H-h+1), single-channel float.
                    using (IplImage res = Cv.CreateImage(Cv.Size(img.Width - tpl.Width + 1, img.Height - tpl.Height + 1), BitDepth.F32, 1))
                    {
                        Cv.MatchTemplate(img, tpl, res, MatchTemplateMethod.CCoeff);
                        Cv.MinMaxLoc(res, out minval, out maxval, out minloc, out maxloc, null);
                        // BUGFIX: for CCoeff the BEST match is the maximum of the
                        // result map, not the minimum — the old code highlighted
                        // the worst match at minloc.
                        Cv.Rectangle(img, Cv.Point(maxloc.X, maxloc.Y), Cv.Point(maxloc.X + tpl.Width, maxloc.Y + tpl.Height), CvColor.Red, 1, 0, 0);
                    }
                    w.Image = img;
                }
            }
        }