Example #1
0
        public Text()
        {
            // cvInitFont, cvPutText
            // Initialize fonts and render one line of sample text per font face,
            // in both upright and italic variants.

            var faces = new List<FontFace>((FontFace[])Enum.GetValues(typeof(FontFace)));
            faces.Remove(FontFace.Italic);

            // (1) Allocate and clear the canvas.
            using (IplImage img = Cv.CreateImage(new CvSize(450, 600), BitDepth.U8, 3))
            {
                Cv.Zero(img);

                // (2) Build one upright and one italic font for each face.
                var fonts = new CvFont[faces.Count * 2];
                for (int j = 0; j < faces.Count; j++)
                {
                    fonts[j * 2]     = new CvFont(faces[j], 1.0, 1.0);
                    fonts[j * 2 + 1] = new CvFont(faces[j] | FontFace.Italic, 1.0, 1.0);
                }

                // (3) Draw a line of text per font, each in a fresh random color.
                for (int j = 0; j < fonts.Length; j++)
                {
                    Cv.PutText(img, "OpenCV sample code", new CvPoint(15, (j + 1) * 30), fonts[j], CvColor.Random());
                }

                // (4) Show the result; exit when a key is pressed.
                using (CvWindow w = new CvWindow(img))
                {
                    CvWindow.WaitKey(0);
                }
            }
        }
        /// <summary>
        /// Pumps frames from the capture source until the stream ends or Esc is pressed.
        /// After every FrameSkipValue frames, the current frame is handed to an
        /// asynchronous processor and the counter resets.
        /// </summary>
        /// <param name="capture">Open capture source to read frames from.</param>
        public void Process(CvCapture capture)
        {
            FrameCounter = 0;
            while (true)
            {
                FrameCounter++;
                var frame = capture.QueryFrame();
                if (frame == null)
                {
                    // End of stream (or the device stopped delivering frames).
                    break;
                }

                if (FrameCounter > FrameSkipValue)
                {
                    // Fire-and-forget async dispatch of the frame.
                    // NOTE(review): BeginInvoke without a matching EndInvoke can leak on
                    // .NET Framework and is unsupported on .NET Core — consider Task.Run.
                    // NOTE(review): cvQueryFrame typically reuses an internal buffer, so the
                    // background worker may see the frame overwritten by the next read —
                    // TODO confirm whether a Clone() is needed here.
                    var method = new ProcessIplImage(Process);
                    method.BeginInvoke(frame, null, null);
                    FrameCounter = 0;
                }

                // ~33 ms delay between reads; 27 == Esc quits the loop.
                var key = CvWindow.WaitKey(33);
                if (key == 27)
                {
                    break;
                }
            }
        }
Example #3
0
 public Snake()
 {
     // cvSnakeImage demo: iteratively shrink-wrap a contour onto the image.
     using (var src = new IplImage(FilePath.Image.Cake, LoadMode.GrayScale))
     using (var dst = new IplImage(src.Size, BitDepth.U8, 3))
     {
         // Seed the snake with 100 points on an ellipse around the image center.
         var center  = new CvPoint(src.Width / 2, src.Height / 2);
         var contour = new CvPoint[100];
         for (int n = 0; n < contour.Length; n++)
         {
             double angle = 2 * Math.PI * n / contour.Length;
             contour[n].X = (int)(center.X * Math.Cos(angle) + center.X);
             contour[n].Y = (int)(center.Y * Math.Sin(angle) + center.Y);
         }

         Console.WriteLine("Press any key to snake\nEsc - quit");

         using (var window = new CvWindow())
         {
             for (;;)
             {
                 // One snake relaxation step, then redraw over a color copy.
                 src.SnakeImage(contour, 0.45f, 0.35f, 0.2f, new CvSize(15, 15), new CvTermCriteria(1), true);
                 src.CvtColor(dst, ColorConversion.GrayToRgb);

                 var red = new CvColor(255, 0, 0);
                 for (int n = 0; n + 1 < contour.Length; n++)
                 {
                     dst.Line(contour[n], contour[n + 1], red, 2);
                 }
                 // Close the polygon.
                 dst.Line(contour[contour.Length - 1], contour[0], red, 2);

                 window.Image = dst;
                 if (CvWindow.WaitKey() == 27)
                 {
                     break;
                 }
             }
         }
     }
 }
Example #4
0
        public DistTransform()
        {
            // cvDistTransform: compute the L2 distance transform of a grayscale
            // image and normalize the result to 0-255 for display.

            using (var src = new IplImage(FilePath.Image.Lenna, LoadMode.GrayScale))
            {
                if (src.Depth != BitDepth.U8)
                {
                    throw new Exception("Invalid Depth");
                }

                using (var dist = new IplImage(src.Size, BitDepth.F32, 1))
                using (var distVis = new IplImage(src.Size, BitDepth.U8, 1))
                {
                    // Distance to the nearest zero pixel (L2 metric, 3x3 mask),
                    // rescaled into an 8-bit image for viewing.
                    Cv.DistTransform(src, dist, DistanceType.L2, 3, null, null);
                    Cv.Normalize(dist, distVis, 0.0, 255.0, NormType.MinMax, null);

                    using (new CvWindow("Source", WindowMode.AutoSize, src))
                    using (new CvWindow("Distance Image", WindowMode.AutoSize, distVis))
                    {
                        CvWindow.WaitKey(0);
                    }
                }
            }
        }
Example #5
0
        /// <summary>
        /// Demonstrates per-pixel arithmetic (Add/Sub/Mul/Div/Max/Min/AbsDiff) between
        /// <paramref name="src"/> and its mirrored copy, each result in its own window.
        /// Blocks until a key is pressed, then tears the windows down.
        /// </summary>
        /// <param name="src">Input image; its size drives the result buffer.</param>
        public void Calculate(IplImage src)
        {
            calc = new IplImage(src.Size, BitDepth.U8, 3);

            // Dispose the mirrored intermediate when done (it was leaked before).
            using (IplImage srcSymm = this.Symmetry(src))
            {
                // Track the windows so they can be disposed (previously the CvWindow
                // objects were never disposed; only the native windows were destroyed).
                CvWindow[] windows = new CvWindow[7];
                try
                {
                    Cv.Add(src, srcSymm, calc);
                    windows[0] = new CvWindow("Add", WindowMode.StretchImage, calc);

                    Cv.Sub(src, srcSymm, calc);
                    windows[1] = new CvWindow("Sub", WindowMode.StretchImage, calc);

                    Cv.Mul(src, srcSymm, calc);
                    windows[2] = new CvWindow("Mul", WindowMode.StretchImage, calc);

                    Cv.Div(src, srcSymm, calc);
                    windows[3] = new CvWindow("Div", WindowMode.StretchImage, calc);

                    Cv.Max(src, srcSymm, calc);
                    windows[4] = new CvWindow("Max", WindowMode.StretchImage, calc);

                    Cv.Min(src, srcSymm, calc);
                    windows[5] = new CvWindow("Min", WindowMode.StretchImage, calc);

                    Cv.AbsDiff(src, srcSymm, calc);
                    windows[6] = new CvWindow("AbsDiff", WindowMode.StretchImage, calc);

                    Cv.WaitKey(0);
                }
                finally
                {
                    // Close native windows, then release the managed wrappers.
                    CvWindow.DestroyAllWindows();
                    foreach (CvWindow w in windows)
                    {
                        if (w != null)
                        {
                            w.Dispose();
                        }
                    }
                }
            }
        }
Example #6
0
        /// <summary>
        /// Distance-transforms the input image and visualizes the result
        /// normalized to the 0-255 range.
        /// </summary>
        public DistTransform()
        {
            // cvDistTransform
            // Apply a distance transform to the input image, normalize the result
            // to 0-255, and visualize it.

            // (1) Load the image.
            using (IplImage src = new IplImage(Const.ImageLenna, LoadMode.GrayScale))
            {
                if (src.Depth != BitDepth.U8)
                {
                    throw new Exception("Invalid Depth");
                }
                // (2) Allocate the output distance image and its display buffer.
                using (IplImage dst = new IplImage(src.Size, BitDepth.F32, 1))
                    using (IplImage dstNorm = new IplImage(src.Size, BitDepth.U8, 1))
                    {
                        // (3) Compute the distance image; normalize to 0-255 for display.
                        Cv.DistTransform(src, dst, DistanceType.L2, 3, null, null);
                        Cv.Normalize(dst, dstNorm, 0.0, 255.0, NormType.MinMax, null);

                        // (4) Show the distance image; exit when a key is pressed.
                        using (new CvWindow("Source", WindowMode.AutoSize, src))
                            using (new CvWindow("Distance Image", WindowMode.AutoSize, dstNorm))
                            {
                                CvWindow.WaitKey(0);
                            }
                    }
            }
        }
Example #7
0
        /// <summary>
        /// Live eye-tracking demo: grabs frames from camera index 1, runs a Haar
        /// eye cascade on a downscaled grayscale copy, and circles each detection.
        /// Loops until any key is pressed.
        /// </summary>
        public EyeDetect()
        {
            // Palette cycled through for successive detections.
            CvColor[] colors = new CvColor[] {
                new CvColor(0, 0, 255),
                new CvColor(0, 128, 255),
                new CvColor(0, 255, 255),
                new CvColor(0, 255, 0),
                new CvColor(255, 128, 0),
                new CvColor(255, 255, 0),
                new CvColor(255, 0, 0),
                new CvColor(255, 0, 255),
            };

            // Detection runs at 1/Scale resolution; coordinates are mapped back up.
            const double Scale        = 1.25;
            const double ScaleFactor  = 2.5;
            const int    MinNeighbors = 2;

            // NOTE(review): camera index 1 assumes a second camera — TODO confirm.
            using (CvCapture cap = CvCapture.FromCamera(1))
                using (CvWindow w = new CvWindow("Eye Tracker"))
                {
                    // WaitKey returns a negative value while no key is pressed.
                    while (CvWindow.WaitKey(10) < 0)
                    {
                        // NOTE(review): disposing the frame returned by QueryFrame may be wrong
                        // if the capture owns the buffer (cvQueryFrame semantics) — TODO confirm
                        // OpenCvSharp's ownership rules here.
                        using (IplImage img = cap.QueryFrame())
                            using (IplImage smallImg = new IplImage(new CvSize(Cv.Round(img.Width / Scale), Cv.Round(img.Height / Scale)), BitDepth.U8, 1))
                            {
                                // Build a downscaled, histogram-equalized grayscale copy.
                                using (IplImage gray = new IplImage(img.Size, BitDepth.U8, 1))
                                {
                                    Cv.CvtColor(img, gray, ColorConversion.BgrToGray);
                                    Cv.Resize(gray, smallImg, Interpolation.Linear);
                                    Cv.EqualizeHist(smallImg, smallImg);
                                }

                                // NOTE(review): hard-coded cascade path, and the cascade is
                                // reloaded from disk on every frame — consider hoisting it
                                // out of the loop.
                                using (CvHaarClassifierCascade cascade = CvHaarClassifierCascade.FromFile("C:\\Program Files\\OpenCV\\data\\haarcascades\\haarcascade_eye.xml"))
                                    using (CvMemStorage storage = new CvMemStorage())
                                    {
                                        storage.Clear();

                                        // Detect eyes, timing the call.
                                        Stopwatch         watch = Stopwatch.StartNew();
                                        CvSeq <CvAvgComp> eyes  = Cv.HaarDetectObjects(smallImg, cascade, storage, ScaleFactor, MinNeighbors, 0, new CvSize(30, 30));
                                        watch.Stop();
                                        //Console.WriteLine("detection time = {0}msn", watch.ElapsedMilliseconds);

                                        // Circle each detection, mapping back to full scale.
                                        for (int i = 0; i < eyes.Total; i++)
                                        {
                                            CvRect  r      = eyes[i].Value.Rect;
                                            CvPoint center = new CvPoint
                                            {
                                                X = Cv.Round((r.X + r.Width * 0.5) * Scale),
                                                Y = Cv.Round((r.Y + r.Height * 0.5) * Scale)
                                            };
                                            int radius = Cv.Round((r.Width + r.Height) * 0.25 * Scale);
                                            img.Circle(center, radius, colors[i % 8], 3, LineType.AntiAlias, 0);
                                        }
                                    }

                                w.Image = img;
                            }
                    }
                }
        }
Example #8
0
 /// <summary>
 /// Plots the input feature points on a 300x300 canvas, colored by class id
 /// (1 = red, 2 = green-yellow, anything else = default color), then shows it.
 /// </summary>
 /// <param name="points">Feature coordinates — assumed to lie in 0..1; TODO confirm.</param>
 /// <param name="id_array">Class id per point; drives the dot color.</param>
 private void Debug_DrawInputFeature(CvPoint2D32f[] points, int[] id_array)
 {
     using (IplImage plot = Cv.CreateImage(new CvSize(300, 300), BitDepth.U8, 3))
     {
         plot.Zero();
         for (int n = 0; n < id_array.Length; n++)
         {
             // Flip Y so larger feature values appear higher on the canvas.
             int px = (int)(points[n].X * 300);
             int py = (int)(300 - points[n].Y * 300);

             CvColor dot = new CvColor();
             switch (id_array[n])
             {
                 case 1:
                     dot = CvColor.Red;
                     break;
                 case 2:
                     dot = CvColor.GreenYellow;
                     break;
             }
             plot.Circle(px, py, 2, dot, -1);
         }
         CvWindow.ShowImages(plot);
     }
 }
Example #9
0
        /// <summary>
        /// Captures frames from camera 0, stamps a frame counter onto each one,
        /// writes them to "cap.avi", and previews them until Esc is pressed.
        /// </summary>
        public VideoWriter()
        {
            // (1) Create a capture handle for the camera.
            using (CvCapture capture = CvCapture.FromCamera(0))
            {
                // (2) Query the capture size (depends on the camera in use).
                int    width  = capture.FrameWidth;
                int    height = capture.FrameHeight;
                double fps    = 15;//capture.Fps;
                // (3) Create the video writer.
                using (CvVideoWriter writer = new CvVideoWriter("cap.avi", FourCC.Prompt, fps, new CvSize(width, height)))
                using (CvFont font = new CvFont(FontFace.HersheyComplex, 0.7, 0.7))
                using (CvWindow window = new CvWindow("Capture", WindowMode.AutoSize))
                {
                    // (4) Grab frames, annotate them, and write them to the file.
                    for (int frames = 0; ; frames++)
                    {
                        IplImage frame = capture.QueryFrame();
                        if (frame == null)
                        {
                            // Camera stopped delivering frames; the original
                            // dereferenced null here and crashed.
                            break;
                        }
                        string str = string.Format("{0}[frame]", frames);
                        frame.PutText(str, new CvPoint(10, 20), font, new CvColor(0, 255, 100));
                        writer.WriteFrame(frame);
                        window.ShowImage(frame);

                        // Pace playback to the target fps; Esc (0x1b) exits.
                        int key = CvWindow.WaitKey((int)(1000 / fps));
                        if (key == '\x1b')
                        {
                            break;
                        }
                    }
                }
            }
        }
Example #10
0
        public HoughCircles()
        {
            // Detect circles with the Hough gradient method and draw them in red.
            using (IplImage source = new IplImage(FilePath.Image.Walkman, LoadMode.Color))
            using (IplImage gray = new IplImage(source.Size, BitDepth.U8, 1))
            using (IplImage result = source.Clone())
            {
                // Grayscale + Gaussian blur to suppress noise before the transform.
                Cv.CvtColor(source, gray, ColorConversion.BgrToGray);
                Cv.Smooth(gray, gray, SmoothType.Gaussian, 9);
                //Cv.Canny(gray, gray, 75, 150, ApertureSize.Size3);

                using (var storage = new CvMemStorage())
                {
                    CvSeq <CvCircleSegment> circles = gray.HoughCircles(storage, HoughCirclesMethod.Gradient, 1, 100, 150, 55, 0, 0);
                    foreach (CvCircleSegment circle in circles)
                    {
                        result.Circle(circle.Center, (int)circle.Radius, CvColor.Red, 3);
                    }
                }

                using (new CvWindow("gray", WindowMode.AutoSize, gray))
                using (new CvWindow("Hough circles", WindowMode.AutoSize, result))
                {
                    CvWindow.WaitKey(0);
                }
            }
        }
Example #11
0
        public FaceDetect()
        {
            CheckMemoryLeak();

            // CvHaarClassifierCascade, cvHaarDetectObjects

            const double Scale        = 1.14;
            const double ScaleFactor  = 1.0850;
            const int    MinNeighbors = 2;

            // Palette cycled through for successive detections.
            CvColor[] palette =
            {
                new CvColor(0, 0, 255),
                new CvColor(0, 128, 255),
                new CvColor(0, 255, 255),
                new CvColor(0, 255, 0),
                new CvColor(255, 128, 0),
                new CvColor(255, 255, 0),
                new CvColor(255, 0, 0),
                new CvColor(255, 0, 255),
            };

            using (IplImage img = new IplImage(FilePath.Image.Yalta, LoadMode.Color))
            using (IplImage smallImg = new IplImage(new CvSize(Cv.Round(img.Width / Scale), Cv.Round(img.Height / Scale)), BitDepth.U8, 1))
            {
                // Build a downscaled, histogram-equalized grayscale copy for detection.
                using (IplImage gray = new IplImage(img.Size, BitDepth.U8, 1))
                {
                    Cv.CvtColor(img, gray, ColorConversion.BgrToGray);
                    Cv.Resize(gray, smallImg, Interpolation.Linear);
                    Cv.EqualizeHist(smallImg, smallImg);
                }

                using (var cascade = CvHaarClassifierCascade.FromFile(FilePath.Text.HaarCascade))
                using (var storage = new CvMemStorage())
                {
                    storage.Clear();

                    // Detect faces, timing the call.
                    Stopwatch watch = Stopwatch.StartNew();
                    CvSeq <CvAvgComp> faces = Cv.HaarDetectObjects(smallImg, cascade, storage, ScaleFactor, MinNeighbors, 0, new CvSize(30, 30));
                    watch.Stop();
                    Console.WriteLine("detection time = {0}ms\n", watch.ElapsedMilliseconds);

                    // Circle each detection, mapping coordinates back to full scale.
                    for (int i = 0; i < faces.Total; i++)
                    {
                        CvRect r = faces[i].Value.Rect;
                        var center = new CvPoint(
                            Cv.Round((r.X + r.Width * 0.5) * Scale),
                            Cv.Round((r.Y + r.Height * 0.5) * Scale));
                        int radius = Cv.Round((r.Width + r.Height) * 0.25 * Scale);
                        img.Circle(center, radius, palette[i % 8], 3, LineType.AntiAlias, 0);
                    }
                }

                // Show the annotated image.
                CvWindow.ShowImages(img);
            }
        }
Example #12
0
        /// <summary>
        /// Disposes any object created from this class: stops the worker thread
        /// and closes/disposes both preview windows. Safe to call more than once.
        /// </summary>
        public void Dispose()
        {
            // Null-check BEFORE touching IsAlive: the previous order threw a
            // NullReferenceException when Dispose ran before the thread existed
            // or when Dispose was called twice.
            if (workingThread != null)
            {
                if (workingThread.IsAlive)
                {
                    // NOTE(review): Thread.Abort is obsolete and throws
                    // PlatformNotSupportedException on .NET Core — a cooperative
                    // cancellation flag would be safer. TODO confirm target framework.
                    workingThread.Abort();
                }
                workingThread = null;
            }

            if (window1 != null)
            {
                window1.Close();
                window1.Dispose();
                window1 = null;
            }

            if (window2 != null)
            {
                window2.Close();
                window2.Dispose();
                window2 = null;
            }
        }
Example #13
0
        public FitLine()
        {
            // cvFitLine: fit a 2D line to random points, then draw the line and
            // label each point with its distance to the line.
            CvSize canvasSize = new CvSize(500, 500);

            CvPoint2D32f[] samples = GetRandomPoints(20, canvasSize);
            CvLine2D fitted = Cv.FitLine2D(samples, DistanceType.L2, 0, 0.01, 0.01);

            using (IplImage canvas = new IplImage(canvasSize, BitDepth.U8, 3))
            {
                canvas.Zero();

                // Clip the fitted line to the canvas and draw it.
                CvPoint lineStart, lineEnd;
                fitted.FitSize(canvas.Width, canvas.Height, out lineStart, out lineEnd);
                canvas.Line(lineStart, lineEnd, CvColor.Green, 1, LineType.Link8);

                // Mark each sample and annotate it with its distance to the line.
                using (CvFont font = new CvFont(FontFace.HersheySimplex, 0.33, 0.33))
                {
                    foreach (CvPoint2D32f sample in samples)
                    {
                        double distance = fitted.Distance(sample);

                        canvas.Circle(sample, 2, CvColor.White, -1, LineType.AntiAlias);
                        canvas.PutText(string.Format("{0:F1}", distance), new CvPoint((int)(sample.X + 3), (int)(sample.Y + 3)), font, CvColor.Green);
                    }
                }

                CvWindow.ShowImages(canvas);
            }
        }
Example #14
0
        /// <summary>
        /// Corrects lens distortion using calibration data loaded from a file.
        /// </summary>
        public Undistort()
        {
            // cvUndistort2
            // Use calibration data to correct lens distortion.

            // (1) Load the image to be corrected.
            using (IplImage srcImg = new IplImage(Const.ImageDistortion, LoadMode.Color))
                using (IplImage dstImg = srcImg.Clone())
                {
                    // (2) Read the camera parameter file.
                    CvMat intrinsic, distortion;
                    using (CvFileStorage fs = new CvFileStorage(Const.XmlCamera, null, FileStorageMode.Read))
                    {
                        CvFileNode param = fs.GetFileNodeByName(null, "intrinsic");
                        intrinsic  = fs.Read <CvMat>(param);
                        param      = fs.GetFileNodeByName(null, "distortion");
                        distortion = fs.Read <CvMat>(param);
                    }

                    // (3) Undistort.
                    Cv.Undistort2(srcImg, dstImg, intrinsic, distortion);

                    // (4) Show both images; exit when a key is pressed.
                    using (CvWindow w1 = new CvWindow("Distortion", WindowMode.AutoSize, srcImg))
                        using (CvWindow w2 = new CvWindow("Undistortion", WindowMode.AutoSize, dstImg))
                        {
                            CvWindow.WaitKey(0);
                        }

                    // NOTE(review): the matrices are only disposed on the success path; an
                    // exception above would leak them — consider wrapping them in 'using'.
                    intrinsic.Dispose();
                    distortion.Dispose();
                }
        }
Example #15
0
        //--------------------------------------------------------------------------------------
        // private
        //---------------------------------------------------------------------------------------

        /// <summary>
        /// Visualizes the trained SVM's decision regions by classifying every point
        /// of a 300x300 grid and coloring it red (class 1) or green-yellow (class 2).
        /// </summary>
        private void Debug_DispPredict()
        {
            using (IplImage retPlot = new IplImage(300, 300, BitDepth.U8, 3))
            {
                for (int x = 0; x < 300; x++)
                {
                    for (int y = 0; y < 300; y++)
                    {
                        float[] sample = { x / 300f, y / 300f };
                        // Dispose the per-point sample matrix — the original leaked
                        // 90,000 unmanaged CvMat headers on every call.
                        using (CvMat sampleMat = new CvMat(1, 2, MatrixType.F32C1, sample))
                        {
                            int ret = (int)svm.Predict(sampleMat);
                            // Flip Y so feature-space "up" maps to canvas "up".
                            CvRect plotRect = new CvRect(x, 300 - y, 1, 1);
                            if (ret == 1)
                            {
                                retPlot.Rectangle(plotRect, CvColor.Red);
                            }
                            else if (ret == 2)
                            {
                                retPlot.Rectangle(plotRect, CvColor.GreenYellow);
                            }
                        }
                    }
                }
                CvWindow.ShowImages(retPlot);
            }
        }
Example #16
0
        /// <summary>
        /// Corrects lens distortion in an image using intrinsic/distortion matrices
        /// loaded from a camera calibration file, then shows before/after windows.
        /// </summary>
        public Undistort()
        {
            using (IplImage srcImg = new IplImage(FilePath.Image.Distortion, LoadMode.Color))
            using (IplImage dstImg = srcImg.Clone())
            {
                // Read the calibration matrices from the parameter file.
                CvMat intrinsic, distortion;
                using (CvFileStorage fs = new CvFileStorage(FilePath.Text.Camera, null, FileStorageMode.Read))
                {
                    CvFileNode param = fs.GetFileNodeByName(null, "intrinsic");
                    intrinsic = fs.Read <CvMat>(param);
                    param = fs.GetFileNodeByName(null, "distortion");
                    distortion = fs.Read <CvMat>(param);
                }

                // 'using' guarantees the matrices are disposed even if Undistort2 or a
                // window operation throws (the original leaked them on that path).
                using (intrinsic)
                using (distortion)
                {
                    Cv.Undistort2(srcImg, dstImg, intrinsic, distortion);

                    using (new CvWindow("Distortion", WindowMode.AutoSize, srcImg))
                    using (new CvWindow("Undistortion", WindowMode.AutoSize, dstImg))
                    {
                        CvWindow.WaitKey(0);
                    }
                }
            }
        }
 // Use this for initialization (Unity lifecycle hook).
 void Start()
 {
     //CvCapture capture;
     // Open the web camera (device 0).
     _Capture = Cv.CreateCameraCapture(0);
     // Set the requested capture width and height.
     Cv.SetCaptureProperty(_Capture, CaptureProperty.FrameWidth, CAPTURE_WIDTH);
     Cv.SetCaptureProperty(_Capture, CaptureProperty.FrameHeight, CAPTURE_HEIGHT);
     // Grab one frame from the camera.
     //IplImage frame;
     _Frame = Cv.QueryFrame(_Capture);
     // Log the actual frame size to the Unity console.
     // NOTE(review): _Frame will be null if the camera failed to open — this line
     // would then throw. TODO confirm a guard is not needed here.
     Debug.Log("width:" + _Frame.Width + " height:" + _Frame.Height);
     // Window used to pick RGB values from the video; clicks are handled
     // by GetClickedPixelRgb.
     _Window = new CvWindow("GetRGBWindow", _Frame);
     _Window.OnMouseCallback += new CvMouseCallback(GetClickedPixelRgb);
     // HSV color used for the segmentation step (starts as all zeros).
     pointhsv = new CvScalar(0.0, 0.0, 0.0);
     // Pre-create the named windows for each processing step.
     Cv.NamedWindow("Original");
     Cv.NamedWindow("STEP1:Smoothing");
     Cv.NamedWindow("STEP2:HSV");
     Cv.NamedWindow("STEP3:Segmentation");
     Cv.NamedWindow("STEP4:Morphology");
     Cv.NamedWindow("STEP5:Detected");
     Cv.NamedWindow("window");
 }
Example #18
0
        /// <summary>
        /// Entry point: processes a single .mp4 file or every .mp4 in a directory,
        /// showing original / pre-processed / post-processed views for each.
        /// </summary>
        /// <param name="args">args[0] is a video file path or a directory path.</param>
        private static void Main(string[] args)
        {
            // Guard against a missing argument — args[0] previously threw
            // IndexOutOfRangeException when launched with no parameters.
            if (args == null || args.Length == 0)
            {
                Console.WriteLine("usage: <program> <video file or directory>");
                return;
            }

            var processor = BuildProcessor();

            var files = new List <string>();

            var path = args[0];

            if (File.Exists(path))
            {
                files.Add(path);
            }
            else if (Directory.Exists(path))
            {
                files.AddRange(Directory.GetFiles(path, "*.mp4"));
            }

            foreach (var file in files)
            {
                using (var capture = CvCapture.FromFile(file))
                using (var windowOriginal = new CvWindow("Original", WindowMode.AutoSize))
                using (var windowPreImage = new CvWindow("Pre-Image", WindowMode.AutoSize))
                using (var windowPostImage = new CvWindow("Post-Image", WindowMode.AutoSize))
                {
                    processor.OriginalWindow      = windowOriginal;
                    processor.PreProcessedWindow  = windowPreImage;
                    processor.PostProcessedWindow = windowPostImage;
                    processor.Process(capture);
                }
            }
        }
Example #19
0
        // Visualize the trained dictionary (decision regions) as an image.
        // NOTE(review): the unconditional 'return' below disables this method entirely —
        // everything after it is dead code. Presumably left in deliberately while
        // debugging; remove the 'return' to re-enable the visualization.
        public void Debug_DispPredict()
        {
            return;

            // Load the trained model (dictionary) file.
            this.libSVM_model = SVM.LoadModel(@"libsvm_model.xml");

            using (IplImage retPlot = new IplImage(300, 300, BitDepth.U8, 3))
            {
                for (int x = 0; x < 300; x++)
                {
                    for (int y = 0; y < 300; y++)
                    {
                        float[] sample = { x / 300f, y / 300f };
                        // Build the SVM feature nodes for this grid point.
                        SVMNode[] node_array = new SVMNode[2];
                        node_array[0] = new SVMNode(1, sample[0]);
                        node_array[1] = new SVMNode(2, sample[1]);
                        int    ret_double = (int)SVM.Predict(libSVM_model, node_array);
                        int    ret_i      = (int)ret_double;
                        // Flip Y so feature-space "up" maps to canvas "up".
                        CvRect plotRect   = new CvRect(x, 300 - y, 1, 1);
                        if (ret_i == 1)
                        {
                            retPlot.Rectangle(plotRect, CvColor.Red);
                        }
                        else if (ret_i == 2)
                        {
                            retPlot.Rectangle(plotRect, CvColor.GreenYellow);
                        }
                    }
                }
                CvWindow.ShowImages(retPlot);
            }
        }
Example #20
0
        public Image2Stream()
        {
            // Round-trip images between streams and OpenCV containers.

            // Stream -> IplImage
            using (FileStream fileStream = new FileStream(FilePath.Image.Lenna, FileMode.Open))
            using (IplImage image = IplImage.FromStream(fileStream, LoadMode.Color))
            {
                CvWindow.ShowImages(image);

                // IplImage -> Stream
                using (MemoryStream buffer = new MemoryStream())
                {
                    image.ToStream(buffer, ".tiff");
                    buffer.ToString();
                }
            }

            // Stream -> CvMat
            using (FileStream fileStream = new FileStream(FilePath.Image.Lenna, FileMode.Open))
            using (CvMat matrix = CvMat.FromStream(fileStream, LoadMode.Color))
            {
                matrix.ToString();

                // CvMat -> Stream
                using (MemoryStream buffer = new MemoryStream())
                {
                    matrix.ToStream(buffer, ".bmp");
                    buffer.ToString();
                }
            }
        }
Example #21
0
        public SaveImage()
        {
            // JPEG quality test: encode the same image at five quality levels,
            // then reload each result and display them side by side.
            int[] qualities = { 0, 25, 50, 75, 100 };

            using (IplImage img = new IplImage(FilePath.Image.Depth16Bit, LoadMode.Color))
            {
                foreach (int q in qualities)
                {
                    img.SaveImage(string.Format("q{0:D3}.jpg", q), new JpegEncodingParam(q));
                }

                IplImage[] reloaded = new IplImage[qualities.Length];
                CvWindow[] windows  = new CvWindow[qualities.Length];
                try
                {
                    for (int i = 0; i < qualities.Length; i++)
                    {
                        reloaded[i] = new IplImage(string.Format("q{0:D3}.jpg", qualities[i]), LoadMode.Color);
                        windows[i]  = new CvWindow(string.Format("quality {0}", qualities[i]), reloaded[i]);
                    }
                    Cv.WaitKey();
                }
                finally
                {
                    // Mirror the original nesting: windows close first, then images.
                    foreach (CvWindow w in windows)
                    {
                        if (w != null)
                        {
                            w.Dispose();
                        }
                    }
                    foreach (IplImage im in reloaded)
                    {
                        if (im != null)
                        {
                            im.Dispose();
                        }
                    }
                }
            }
        }
Example #22
0
        /// <summary>
        /// Builds a random star-shaped polygon, then computes and displays its
        /// bounding rectangle, enclosed area, and arc length.
        /// </summary>
        public Contour()
        {
            // cvContourArea, cvArcLength
            // Compute the area enclosed by a contour and the contour's length.

            const int SIZE = 500;

            // (1) Allocate and clear the image.
            using (CvMemStorage storage = new CvMemStorage())
                using (IplImage img = new IplImage(SIZE, SIZE, BitDepth.U8, 3))
                {
                    img.Zero();
                    // (2) Generate the point sequence: 20 points around the center at
                    // a randomly perturbed radius (scale in [0.5, 1.5)).
                    CvSeq <CvPoint> points = new CvSeq <CvPoint>(SeqType.PolyLine, storage);
                    CvRNG           rng    = new CvRNG((ulong)DateTime.Now.Ticks);
                    double          scale  = rng.RandReal() + 0.5;
                    CvPoint         pt0    = new CvPoint
                    {
                        X = (int)(Math.Cos(0) * SIZE / 4 * scale + SIZE / 2),
                        Y = (int)(Math.Sin(0) * SIZE / 4 * scale + SIZE / 2)
                    };
                    img.Circle(pt0, 2, CvColor.Green);
                    points.Push(pt0);
                    for (int i = 1; i < 20; i++)
                    {
                        scale = rng.RandReal() + 0.5;
                        CvPoint pt1 = new CvPoint
                        {
                            X = (int)(Math.Cos(i * 2 * Math.PI / 20) * SIZE / 4 * scale + SIZE / 2),
                            Y = (int)(Math.Sin(i * 2 * Math.PI / 20) * SIZE / 4 * scale + SIZE / 2)
                        };
                        img.Line(pt0, pt1, CvColor.Green, 2);
                        pt0.X = pt1.X;
                        pt0.Y = pt1.Y;
                        img.Circle(pt0, 3, CvColor.Green, Cv.FILLED);
                        points.Push(pt0);
                    }
                    // Close the polygon back to the first point.
                    img.Line(pt0, points.GetSeqElem(0).Value, CvColor.Green, 2);
                    // (3) Compute the bounding rectangle, area, and arc length.
                    CvRect rect   = points.BoundingRect(false);
                    double area   = points.ContourArea();
                    double length = points.ArcLength(CvSlice.WholeSeq, 1);
                    // (4) Write the results onto the image.
                    img.Rectangle(new CvPoint(rect.X, rect.Y), new CvPoint(rect.X + rect.Width, rect.Y + rect.Height), CvColor.Red, 2);
                    string text_area   = string.Format("Area:   wrect={0}, contour={1}", rect.Width * rect.Height, area);
                    string text_length = string.Format("Length: rect={0}, contour={1}", 2 * (rect.Width + rect.Height), length);
                    using (CvFont font = new CvFont(FontFace.HersheySimplex, 0.7, 0.7, 0, 1, LineType.AntiAlias))
                    {
                        img.PutText(text_area, new CvPoint(10, img.Height - 30), font, CvColor.White);
                        img.PutText(text_length, new CvPoint(10, img.Height - 10), font, CvColor.White);
                    }
                    // (5) Show the image; exit when a key is pressed.
                    using (CvWindow window = new CvWindow("BoundingRect", WindowMode.AutoSize))
                    {
                        window.Image = img;
                        CvWindow.WaitKey(0);
                    }
                }
        }
Example #23
0
        // Draws the best template-match location on a copy of the source
        // image as a 2-px red rectangle, then shows the annotated copy.
        void ShowMarkedImages(IplImage scr, IplImage tmpl, CvPoint maxPoint)
        {
            var annotated = scr.Clone();
            var matchArea = new CvRect(maxPoint, tmpl.Size);

            // BGR (0, 0, 255) == red
            annotated.DrawRect(matchArea, new CvScalar(0, 0, 255), 2);
            CvWindow.ShowImages(annotated);
        }
Example #24
0
 // shows a live view of the current web cam
 private static void Live(CvCapture cap, CvWindow winScr)
 {
     while (CvWindow.WaitKey(10) != 27)
     {
         IplImage src = cap.QueryFrame();
         winScr.Image = src;
     }
 }
Example #25
0
        public PixelSampling()
        {
            // Pixel sampling for translation: cvGetRectSubPix

            // (1) Load the input image and allocate an output of the same size
            using (IplImage source = new IplImage(Const.ImageLenna, LoadMode.AnyDepth | LoadMode.AnyColor))
            using (IplImage shifted = source.Clone())
            {
                // (2) Choose the point in the source that should end up at the
                //     center of the destination (here: the bottom-right corner)
                CvPoint2D32f center = new CvPoint2D32f(source.Width - 1, source.Height - 1);
                // (3) Shift the whole image so that 'center' lands in the middle
                Cv.GetRectSubPix(source, shifted, center);
                // (4) Show both images until a key is pressed
                using (CvWindow wSrc = new CvWindow("src"))
                using (CvWindow wDst = new CvWindow("dst"))
                {
                    wSrc.Image = source;
                    wDst.Image = shifted;
                    Cv.WaitKey(0);
                }
            }


            // Pixel sampling for rotation: cvGetQuadrangleSubPix

            const int angle = 45;

            // (1) Load the input image and allocate an output of the same size
            using (IplImage source = new IplImage(Const.ImageLenna, LoadMode.AnyDepth | LoadMode.AnyColor))
            using (IplImage rotated = source.Clone())
            {
                // (2) Build the 2x3 affine (rotation) matrix about the image center:
                //     [ cos -sin  w/2 ]
                //     [ sin  cos  h/2 ]
                double rad = angle * Cv.PI / 180.0;
                float[] m = new float[6];
                m[0] = (float)Math.Cos(rad);
                m[1] = (float)(-Math.Sin(rad));
                m[2] = source.Width * 0.5f;
                m[3] = -m[1];
                m[4] = m[0];
                m[5] = source.Height * 0.5f;
                using (CvMat rotation = new CvMat(2, 3, MatrixType.F32C1, m))
                {
                    // (3) Rotate the whole image with GetQuadrangleSubPix
                    Cv.GetQuadrangleSubPix(source, rotated, rotation);
                    // (4) Show both images until a key is pressed
                    using (CvWindow wSrc = new CvWindow("src"))
                    using (CvWindow wDst = new CvWindow("dst"))
                    {
                        wSrc.Image = source;
                        wDst.Image = rotated;
                        Cv.WaitKey(0);
                    }
                }
            }
        }
Example #26
0
        /// <summary>
        /// Interactive inpainting demo: paint a damage mask with the left mouse
        /// button on the "Paint" window; ENTER runs cvInpaint (Navier-Stokes),
        /// 'r' resets the mask and canvas, ESC quits.
        /// </summary>
        /// <param name="src">Source image to damage/repair (not modified).</param>
        /// <returns>The inpainted result (also stored in the 'inpaint' field).</returns>
        public IplImage InpaintImage(IplImage src)
        {
            inpaint = new IplImage(src.Size, BitDepth.U8, 3);

            // FIX: dispose the locally-owned images/windows (original leaked them).
            using (IplImage paint = src.Clone())
            using (IplImage mask = new IplImage(src.Size, BitDepth.U8, 1))
            using (CvWindow win_Paint = new CvWindow("Paint", WindowMode.AutoSize, paint))
            {
                // FIX: a freshly created IplImage has uninitialized pixel data;
                // without this the very first inpaint could use a garbage mask
                // (the 'r' reset below already zeroes it, confirming the intent).
                mask.SetZero();

                CvPoint prevPt = new CvPoint(-1, -1); // (-1,-1) = "no stroke in progress"

                win_Paint.OnMouseCallback += delegate(MouseEvent eve, int x, int y, MouseEvent flag)
                {
                    if (eve == MouseEvent.LButtonDown)
                    {
                        prevPt = new CvPoint(x, y);
                    }
                    else if (eve == MouseEvent.LButtonUp || (flag & MouseEvent.FlagLButton) == 0)
                    {
                        prevPt = new CvPoint(-1, -1);
                    }
                    else if (eve == MouseEvent.MouseMove && (flag & MouseEvent.FlagLButton) != 0)
                    {
                        CvPoint pt = new CvPoint(x, y);

                        // FIX: only draw when a stroke has actually started inside the
                        // window; original drew a line from the (-1,-1) sentinel when
                        // the button was pressed outside and dragged in.
                        if (prevPt.X >= 0 && prevPt.Y >= 0)
                        {
                            Cv.DrawLine(mask, prevPt, pt, CvColor.White, 5, LineType.AntiAlias, 0);
                            Cv.DrawLine(paint, prevPt, pt, CvColor.White, 5, LineType.AntiAlias, 0);
                            win_Paint.ShowImage(paint);
                        }
                        prevPt = pt;
                    }
                };

                // FIX: reuse one result window instead of creating (and leaking)
                // a new "Inpainted" window on every ENTER press.
                CvWindow win_Inpaint = null;
                try
                {
                    bool repeat = true;
                    while (repeat)
                    {
                        switch (CvWindow.WaitKey(0))
                        {
                        case 'r':
                            // Reset both the mask and the paint canvas.
                            mask.SetZero();
                            Cv.Copy(src, paint);
                            win_Paint.ShowImage(paint);
                            break;

                        case '\r':
                            if (win_Inpaint == null)
                            {
                                win_Inpaint = new CvWindow("Inpainted", WindowMode.AutoSize);
                            }
                            Cv.Inpaint(paint, mask, inpaint, 3, InpaintMethod.NS);
                            win_Inpaint.ShowImage(inpaint);
                            break;

                        case (char)27:
                            CvWindow.DestroyAllWindows();
                            repeat = false;
                            break;
                        }
                    }
                }
                finally
                {
                    if (win_Inpaint != null)
                    {
                        win_Inpaint.Dispose();
                    }
                }
            }
            return(inpaint);
        }
Example #27
0
 // Displays the image in a temporary window and blocks until any key is pressed.
 static void ShowImage(IplImage image)
 {
     using (CvWindow viewer = new CvWindow("OpenCV Window"))
     {
         viewer.ShowImage(image);
         CvWindow.WaitKey(0);
     }
 }
Example #28
0
        public Watershed()
        {
            // cvWatershed demo:
            // Left-click in the "image" window to drop circular seed markers.
            // Watershed then grows every seed along the image gradient; the
            // region boundaries it finds are painted red on the result image.
            // The image is split into as many regions as there are seeds.

            // (2) Load the input, clone the result/display images, and allocate
            //     the 32-bit signed single-channel marker image
            using (IplImage input = new IplImage(Const.ImageGoryokaku, LoadMode.AnyDepth | LoadMode.AnyColor))
            using (IplImage result = input.Clone())
            using (IplImage display = input.Clone())
            using (IplImage markerImg = new IplImage(input.Size, BitDepth.S32, 1))
            {
                markerImg.Zero();

                // (3) Show the input and register a mouse handler that places
                //     circular seed regions where the user clicks
                using (CvWindow wInput = new CvWindow("image", WindowMode.AutoSize))
                {
                    wInput.Image = input;
                    int seedCount = 0;
                    wInput.OnMouseCallback += delegate(MouseEvent ev, int x, int y, MouseEvent flags)
                    {
                        if (ev != MouseEvent.LButtonDown)
                        {
                            return;
                        }
                        seedCount++;
                        CvPoint center = new CvPoint(x, y);
                        // Each seed gets its own positive label in the marker image
                        markerImg.Circle(center, 20, CvScalar.ScalarAll(seedCount), Cv.FILLED, LineType.Link8, 0);
                        display.Circle(center, 20, CvColor.White, 3, LineType.Link8, 0);
                        wInput.Image = display;
                    };
                    CvWindow.WaitKey();
                }

                // (4) Run the watershed segmentation
                Cv.Watershed(input, markerImg);

                // (5) Watershed boundary pixels come back labeled -1;
                //     paint them red on the result image
                for (int row = 0; row < markerImg.Height; row++)
                {
                    for (int col = 0; col < markerImg.Width; col++)
                    {
                        if ((int)(markerImg.Get2D(row, col).Val0) == -1)
                        {
                            result.Set2D(row, col, CvColor.Red);
                        }
                    }
                }
                using (CvWindow wResult = new CvWindow("watershed transform", WindowMode.AutoSize))
                {
                    wResult.Image = result;
                    CvWindow.WaitKey();
                }
            }
        }
Example #29
0
        public void Run()
        {
            // Load the same file into two Mats, convert each to IplImage,
            // and display both side by side.
            var first = new Mat(FilePath.Image.Lenna511, LoadMode.Color);
            var second = new Mat(FilePath.Image.Lenna511, LoadMode.Color);

            CvWindow.ShowImages((IplImage)first, (IplImage)second);
        }
Example #30
0
        /// <summary>
        /// Blob-tracking demo: for each video frame, Otsu-threshold it, keep the
        /// 200 largest blobs, render them in one window and the track history in
        /// another, advancing every 200 ms.
        /// </summary>
        private static void Track()
        {
            // FIX: dispose the windows along with the capture (original leaked both).
            using (var video = new CvCapture("data/bach.mp4"))
            using (var window = new CvWindow("render"))
            using (var windowTracks = new CvWindow("tracks"))
            {
                IplImage gray = null;
                IplImage binary = null;
                IplImage render = null;
                IplImage renderTracks = null;
                CvTracks tracks = new CvTracks();
                try
                {
                    for (int i = 0; ; i++)
                    {
                        IplImage frame = video.QueryFrame();
                        // FIX: the null check was commented out, so the loop
                        // dereferenced null (NullReferenceException) once the
                        // video ran out of frames.
                        if (frame == null)
                        {
                            break;
                        }

                        if (gray == null)
                        {
                            // Lazily allocate work buffers once the frame size is known
                            gray = new IplImage(frame.Size, BitDepth.U8, 1);
                            binary = new IplImage(frame.Size, BitDepth.U8, 1);
                            render = new IplImage(frame.Size, BitDepth.U8, 3);
                            renderTracks = new IplImage(frame.Size, BitDepth.U8, 3);
                        }

                        render.Zero();
                        renderTracks.Zero();

                        Cv.CvtColor(frame, gray, ColorConversion.BgrToGray);
                        Cv.Threshold(gray, binary, 0, 255, ThresholdType.Otsu);

                        // Label blobs, then keep only the 200 largest by area
                        CvBlobs blobs = new CvBlobs(binary);
                        CvBlobs newBlobs = new CvBlobs(blobs
                            .OrderByDescending(pair => pair.Value.Area)
                            .Take(200)
                            .ToDictionary(pair => pair.Key, pair => pair.Value), blobs.Labels);
                        newBlobs.RenderBlobs(binary, render);
                        window.ShowImage(render);

                        newBlobs.UpdateTracks(tracks, 10.0, Int32.MaxValue);
                        tracks.Render(binary, renderTracks);
                        windowTracks.ShowImage(renderTracks);

                        Cv.WaitKey(200);
                        Console.WriteLine(i);
                    }
                }
                finally
                {
                    // FIX: free the lazily-created work buffers (original never did)
                    if (gray != null) gray.Dispose();
                    if (binary != null) binary.Dispose();
                    if (render != null) render.Dispose();
                    if (renderTracks != null) renderTracks.Dispose();
                }
            }
        }
Example #31
-1
        /// <summary>
        /// Initialize camera input, frame window and other image objects required.
        /// This is done after getting the settings of the tracker object of this class.
        /// </summary>
        /// <remarks>
        /// NOTE(review): method name is misspelled ("Initilize"); renaming would
        /// break existing callers, so it is left as-is.
        /// </remarks>
        public void InitilizeCamera()
        {
            // Initialize the camera via VideoInput (an earlier Capture-based
            // approach is kept, commented out, below).
            try
            {
                //capture_ = new Capture(1);
                videoInput = new VideoInput();
            }
            catch (Exception exception)
            {
                // Camera access is essential: tell the user, then terminate.
                // NOTE(review): Process.Kill is abrupt — no cleanup/finalizers run;
                // consider a graceful shutdown path.
                MessageBox.Show("Failed to initialize the camera, the program will be closed." +
                                "\n\nThis is the internal error:\n" + exception.Message, "Notify", MessageBoxButtons.OK, MessageBoxIcon.Information);
                System.Diagnostics.Process.GetCurrentProcess().Kill();
            }

            // Use a small 320x240 frame to decrease computational complexity
            size = new CvSize(320, 240);

            // deviceID is presumably a field selecting which camera to open — set elsewhere; verify against caller
            videoInput.SetupDevice(deviceID, size.Width, size.Height);
            videoInput.SetIdealFramerate(deviceID, 30);

            // Allocate the color frame plus two single-channel work images
            frame            = new IplImage(size, BitDepth.U8, 3);
            grayFrame        = new IplImage(size, BitDepth.U8, 1);
            transformedFrame = new IplImage(size, BitDepth.U8, 1);

            // Window to view what's going on; positioned relative to the
            // screen's right edge (screenWidth is a field set elsewhere)
            window = new CvWindow("Force Field Transform", WindowMode.KeepRatio);
            window.Resize(320, 240);
            window.Move(screenWidth - 614, 55);
        }