Example #1
        public LatentSVM()
        {
            using (var detector = new CvLatentSvmDetector(FilePath.Text.LatentSvmCat))
            using (var imageSrc = new IplImage(FilePath.Image.Cat, LoadMode.Color))
            using (var imageDst = imageSrc.Clone())
            using (var storage = new CvMemStorage())
            {
                Console.WriteLine("Running LatentSVM...");
                Stopwatch watch = Stopwatch.StartNew();

                CvSeq<CvObjectDetection> result = detector.DetectObjects(imageSrc, storage, 0.5f, 2);

                watch.Stop();
                Console.WriteLine("Elapsed time: {0}ms", watch.ElapsedMilliseconds);

                foreach (CvObjectDetection detection in result)
                {
                    CvRect boundingBox = detection.Rect;
                    imageDst.Rectangle(
                        new CvPoint(boundingBox.X, boundingBox.Y), 
                        new CvPoint(boundingBox.X + boundingBox.Width, boundingBox.Y + boundingBox.Height),
                        CvColor.Red, 3);
                }

                using (new CvWindow("LatentSVM result", imageDst))
                {
                    Cv.WaitKey();
                }
            }
        }
Example #2
        public Affine()
        {
            // cvGetAffineTransform + cvWarpAffine
            // Compute an affine transform matrix from three corresponding points on the image,
            // then use that matrix to apply an affine transform to the whole image.

            // (1) Load the image and allocate the output image
            using (IplImage srcImg = new IplImage(Const.ImageGoryokaku, LoadMode.AnyDepth | LoadMode.AnyColor))
            using (IplImage dstImg = srcImg.Clone())
            {

                // (2) Set the corresponding vertices of the triangle before and after rotation,
                //     then compute the affine matrix with cvGetAffineTransform
                CvPoint2D32f[] srcPnt = new CvPoint2D32f[3];
                CvPoint2D32f[] dstPnt = new CvPoint2D32f[3];
                srcPnt[0] = new CvPoint2D32f(200.0f, 200.0f);
                srcPnt[1] = new CvPoint2D32f(250.0f, 200.0f);
                srcPnt[2] = new CvPoint2D32f(200.0f, 100.0f);
                dstPnt[0] = new CvPoint2D32f(300.0f, 100.0f);
                dstPnt[1] = new CvPoint2D32f(300.0f, 50.0f);
                dstPnt[2] = new CvPoint2D32f(200.0f, 100.0f);
                using (CvMat mapMatrix = Cv.GetAffineTransform(srcPnt, dstPnt))
                {
                    // (3) Rotate the image with cvWarpAffine using the computed affine matrix
                    Cv.WarpAffine(srcImg, dstImg, mapMatrix, Interpolation.Linear | Interpolation.FillOutliers, CvScalar.ScalarAll(0));
                    // (4) Show the results
                    using (new CvWindow("src", srcImg)) 
                    using (new CvWindow("dst", dstImg))
                    {
                        Cv.WaitKey(0);
                    }
                }
            }
        }
Example #3
        public static OpenCvSharp.IplImage GetSub(this OpenCvSharp.IplImage ipl, OpenCvSharp.CvRect subRect)
        {
            if (ipl == null)
            {
                throw new ArgumentNullException("ipl", "ipl is null.");
            }

            var boundingRect = new CvRect(0, 0, ipl.Width, ipl.Height);

            if (!boundingRect.Contains(subRect))
            {
                throw new InvalidOperationException("subRect is outside of ipl");
            }


            try
            {
                ipl.SetROI(subRect);

                OpenCvSharp.IplImage sub = new IplImage(
                    ipl.GetSize(),
                    ipl.Depth,
                    ipl.NChannels);

                ipl.Copy(sub);
                return sub;
            }
            finally
            {
                ipl.ResetROI();
            }
        }
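A minimal usage sketch for this extension method (the file names and the crop rectangle are illustrative assumptions, not taken from the original project):

        public static void GetSubSample()
        {
            // Crop a 100x100 region starting at (10, 10); GetSub throws if the
            // rectangle does not lie inside the source image.
            using (IplImage whole = new IplImage("input.png", LoadMode.Color))
            using (IplImage cropped = whole.GetSub(new CvRect(10, 10, 100, 100)))
            {
                cropped.SaveImage("cropped.png");
            }
        }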
Example #4
        public Perspective()
        {
            // cvGetPerspectiveTransform + cvWarpPerspective
            // Compute a perspective transform matrix from four corresponding points on the image,
            // then use that matrix to apply a perspective transform to the whole image.

            // (1) Load the image and allocate the output image
            using (IplImage srcImg = new IplImage(Const.ImageLenna, LoadMode.AnyDepth | LoadMode.AnyColor))
            using (IplImage dstImg = srcImg.Clone())
            {
                // (2) Set the corresponding vertices of the quadrangle before and after the transform,
                //     then compute the perspective transform matrix with cvGetPerspectiveTransform
                CvPoint2D32f[] srcPnt = new CvPoint2D32f[4];
                CvPoint2D32f[] dstPnt = new CvPoint2D32f[4];
                srcPnt[0] = new CvPoint2D32f(150.0f, 150.0f);
                srcPnt[1] = new CvPoint2D32f(150.0f, 300.0f);
                srcPnt[2] = new CvPoint2D32f(350.0f, 300.0f);
                srcPnt[3] = new CvPoint2D32f(350.0f, 150.0f);
                dstPnt[0] = new CvPoint2D32f(200.0f, 200.0f);
                dstPnt[1] = new CvPoint2D32f(150.0f, 300.0f);
                dstPnt[2] = new CvPoint2D32f(350.0f, 300.0f);
                dstPnt[3] = new CvPoint2D32f(300.0f, 200.0f);
                using (CvMat mapMatrix = Cv.GetPerspectiveTransform(srcPnt, dstPnt))
                {
                    // (3) Warp the image with cvWarpPerspective using the computed perspective matrix
                    Cv.WarpPerspective(srcImg, dstImg, mapMatrix, Interpolation.Linear | Interpolation.FillOutliers, CvScalar.ScalarAll(100));
                    // (4) Show the results
                    using (new CvWindow("src", srcImg))
                    using (new CvWindow("dst", dstImg))
                    {
                        Cv.WaitKey(0);
                    }
                }
            }
        }
Example #5
        public int FindPlates()
        {
            IplImage labelImg = new IplImage(src.Size, CvBlobLib.DepthLabel, 1);
            blobs = new CvBlobs();
            plate.Clear();
            CvBlobLib.Label(timg, labelImg, blobs);
            CvBlobLib.FilterByArea(blobs, x, y);
            IplImage srctemp = src.Clone();
            CvBlobLib.RenderBlobs(labelImg, blobs, src, srctemp, RenderBlobsMode.BoundingBox | RenderBlobsMode.Angle);

            foreach (var item in blobs)
            {

                item.Value.SetImageROItoBlob(pimg);
                // a plate's aspect ratio (width/height) is expected to fall between 3.5 and 5.4
                double ratio = (double)item.Value.Rect.Width / item.Value.Rect.Height;
                double angle = (double)item.Value.CalcAngle();
                //if (ratio > 3.5 && ratio < 5.4 && angle > -15 && angle < 15)
                if (ratio > 1 && ratio < 6 && angle > -15 && angle < 15)
                {
                    //                    IplImage platetemp = new IplImage(new CvSize(pimg.ROI.Width, pimg.ROI.Height), pimg.Depth, pimg.NChannels);
                    IplImage platetemp = new IplImage(new CvSize(140, 27), pimg.Depth, pimg.NChannels);
                    Cv.Resize(pimg, platetemp);
                    //                    Cv.Copy(pimg, platetemp);
                    plate.Add(platetemp);
                    src.Rectangle(item.Value.Rect, new CvScalar(0, 0, 255), 2, LineType.Link4);
                }
            }

            //            CvBlobLib.RenderBlobs(labelImg, blobs, src, src, RenderBlobsMode.BoundingBox);
            src.ResetROI();

            return plate.Count;
        }
Example #6
        void JpegStreamNewFrame(object sender, NewFrameEventArgs eventArgs)
        {
            if (!_resized)
            {
                if (_displayControl != null)
                {
                    _displayControl.Invalidate();
                }
                this.LiveViewResized(null);
                _resized = true;
            }

            LastImage = eventArgs.Frame;

            if (_motionFramesQueue.Count > MotionQueueSize)
            {
                return;
            }

            var bmp = AForge.Imaging.Image.Clone(eventArgs.Frame);

            OpenCvSharp.IplImage ipl = null;

            try
            {
                ipl = OpenCvSharp.IplImage.FromBitmap(bmp);
            }
            catch (Exception)
            {
                return;
            }
            finally
            {
                if (bmp != null)
                {
                    bmp.Dispose();
                }
            }


            var frame   = new Frame(ipl);
            var grouped = _motionDetector.ProcessFrame(frame);

            if (grouped)
            {
                var motionFrames = _motionDetector.GetMotionFrames();
                if (motionFrames != null)
                {
                    foreach (var motionFrame in motionFrames)
                    {
                        motionFrame.DeviceId = _cameraInfo.Id;
                    }

                    SaveMotionFrames(motionFrames);

                    _motionFramesQueue.Enqueue(motionFrames);
                    _signal.Set();
                }
            }
        }
Example #7
        public HoughCircles()
        {
            using (IplImage imgSrc = new IplImage(Const.ImageWalkman, LoadMode.Color))
            using (IplImage imgGray = new IplImage(imgSrc.Size, BitDepth.U8, 1))
            using (IplImage imgHough = imgSrc.Clone())
            {
                Cv.CvtColor(imgSrc, imgGray, ColorConversion.BgrToGray);
                Cv.Smooth(imgGray, imgGray, SmoothType.Gaussian, 9);
                //Cv.Canny(imgGray, imgGray, 75, 150, ApertureSize.Size3);

                using (CvMemStorage storage = new CvMemStorage())
                {
                    CvSeq<CvCircleSegment> seq = imgGray.HoughCircles(storage, HoughCirclesMethod.Gradient, 1, 100, 150, 55, 0, 0);
                    foreach (CvCircleSegment item in seq)
                    {
                        imgHough.Circle(item.Center, (int)item.Radius, CvColor.Red, 3);
                    }
                }

                // Create windows and show the detection results
                using (new CvWindow("gray", WindowMode.AutoSize, imgGray))
                using (new CvWindow("Hough circles", WindowMode.AutoSize, imgHough))
                {
                    CvWindow.WaitKey(0);
                }
            }
        }
Example #8
        /// <summary>
        /// Finds the contours in the image and selects the longest one that forms the boundary of a hole
        /// </summary>
        /// <param name="image">Image to search for contours</param>
        /// <returns>The search result</returns>
        public CvPoint[] FindMostLengthHole(IplImage image)
        {
            CvMemStorage contours = new CvMemStorage();
            CvSeq<CvPoint> firstContour, mostLengthContour = null;
            double maxContourLength = 0, perim = 0;

            // Separate the image from the background
            separateBackground(image, tmpImg);

            // Find all contours in the image
            Cv.FindContours(tmpImg, contours, out firstContour, CvContour.SizeOf, ContourRetrieval.List, ContourChain.ApproxNone);

            // If no contours were found
            if (firstContour == null) return new CvPoint[0];

            // Look for the longest contour, checking every contour in the chain (including the last one)
            for (CvSeq<CvPoint> currentContour = firstContour; currentContour != null; currentContour = currentContour.HNext)
            {
                if (isHole(currentContour))
                {
                    perim = Cv.ContourPerimeter(currentContour);

                    if (perim >= maxContourLength)
                    {
                        maxContourLength = perim;
                        mostLengthContour = currentContour;
                    }
                }
            }

            // If no hole boundary was found
            if (mostLengthContour == null) return new CvPoint[0];

            return mostLengthContour.ToArray();
        }
Example #9
        public void SetSize(IplImage i)
        {

            this.Size = new Size(i.Size.Width + 10, i.Size.Height + 10);

            this.pictureBoxIpl1.ImageIpl = i;
        }
Example #10
        public FaceDetect()
        {
            CheckMemoryLeak();

            // CvHaarClassifierCascade, cvHaarDetectObjects

            CvColor[] colors = new CvColor[]{
                new CvColor(0,0,255),
                new CvColor(0,128,255),
                new CvColor(0,255,255),
                new CvColor(0,255,0),
                new CvColor(255,128,0),
                new CvColor(255,255,0),
                new CvColor(255,0,0),
                new CvColor(255,0,255),
            };

            const double Scale = 1.14;
            const double ScaleFactor = 1.0850;
            const int MinNeighbors = 2;

            using (IplImage img = new IplImage(FilePath.Image.Yalta, LoadMode.Color))
            using (IplImage smallImg = new IplImage(new CvSize(Cv.Round(img.Width / Scale), Cv.Round(img.Height / Scale)), BitDepth.U8, 1))
            {
                using (IplImage gray = new IplImage(img.Size, BitDepth.U8, 1))
                {
                    Cv.CvtColor(img, gray, ColorConversion.BgrToGray);
                    Cv.Resize(gray, smallImg, Interpolation.Linear);
                    Cv.EqualizeHist(smallImg, smallImg);
                }

                using (var cascade = CvHaarClassifierCascade.FromFile(FilePath.Text.HaarCascade))  
                using (var storage = new CvMemStorage())
                {
                    storage.Clear();

                    // Detect faces
                    Stopwatch watch = Stopwatch.StartNew();
                    CvSeq<CvAvgComp> faces = Cv.HaarDetectObjects(smallImg, cascade, storage, ScaleFactor, MinNeighbors, 0, new CvSize(30, 30));
                    watch.Stop();
                    Console.WriteLine("detection time = {0}ms\n", watch.ElapsedMilliseconds);

                    // Draw a circle around each detected face
                    for (int i = 0; i < faces.Total; i++)
                    {
                        CvRect r = faces[i].Value.Rect;
                        CvPoint center = new CvPoint
                        {
                            X = Cv.Round((r.X + r.Width * 0.5) * Scale),
                            Y = Cv.Round((r.Y + r.Height * 0.5) * Scale)
                        };
                        int radius = Cv.Round((r.Width + r.Height) * 0.25 * Scale);
                        img.Circle(center, radius, colors[i % 8], 3, LineType.AntiAlias, 0);
                    }
                }

                // Show the result in a window
                CvWindow.ShowImages(img);
            }
        }
Example #11
        private static void usingCInterface()
        {
            using (var src = new IplImage(@"..\..\Images\Penguin.Png", LoadMode.AnyDepth | LoadMode.AnyColor))
            using (var dst = new IplImage(src.Size, src.Depth, src.NChannels))
            {
                for (var y = 0; y < src.Height; y++)
                {
                    for (var x = 0; x < src.Width; x++)
                    {
                        CvColor pixel = src[y, x];
                        dst[y, x] = new CvColor
                        {
                            B = (byte)(255 - pixel.B),
                            G = (byte)(255 - pixel.G),
                            R = (byte)(255 - pixel.R)
                        };
                    }
                }

                // [C] Accessing Pixel
                // https://github.com/shimat/opencvsharp/wiki/%5BC%5D-Accessing-Pixel

                using (new CvWindow("C Interface: Src", image: src))
                using (new CvWindow("C Interface: Dst", image: dst))
                {
                    Cv.WaitKey(0);
                }
            }
        }
Example #12
        public CornerDetect()
        {
            int cornerCount = 150;

            using (IplImage dstImg1 = new IplImage(FilePath.Image.Lenna, LoadMode.AnyColor | LoadMode.AnyDepth))
            using (IplImage dstImg2 = dstImg1.Clone())
            using (IplImage srcImgGray = new IplImage(FilePath.Image.Lenna, LoadMode.GrayScale))
            using (IplImage eigImg = new IplImage(srcImgGray.GetSize(), BitDepth.F32, 1))
            using (IplImage tempImg = new IplImage(srcImgGray.GetSize(), BitDepth.F32, 1))
            {
                CvPoint2D32f[] corners;
                Cv.GoodFeaturesToTrack(srcImgGray, eigImg, tempImg, out corners, ref cornerCount, 0.1, 15);
                Cv.FindCornerSubPix(srcImgGray, corners, cornerCount, new CvSize(3, 3), new CvSize(-1, -1), new CvTermCriteria(20, 0.03));

                for (int i = 0; i < cornerCount; i++)
                    Cv.Circle(dstImg1, corners[i], 3, new CvColor(255, 0, 0), 2);

                cornerCount = 150;
                Cv.GoodFeaturesToTrack(srcImgGray, eigImg, tempImg, out corners, ref cornerCount, 0.1, 15, null, 3, true, 0.01);
                Cv.FindCornerSubPix(srcImgGray, corners, cornerCount, new CvSize(3, 3), new CvSize(-1, -1), new CvTermCriteria(20, 0.03));

                for (int i = 0; i < cornerCount; i++)
                    Cv.Circle(dstImg2, corners[i], 3, new CvColor(0, 0, 255), 2);

                using (new CvWindow("EigenVal", WindowMode.AutoSize, dstImg1)) 
                using (new CvWindow("Harris", WindowMode.AutoSize, dstImg2))
                {
                    Cv.WaitKey(0);
                }
            }
        }
Example #13
        public Squares()
        {
            // create memory storage that will contain all the dynamic data
            CvMemStorage storage = new CvMemStorage(0);

            for (int i = 0; i < _names.Length; i++)
            {
                // load i-th image
                using (IplImage img = new IplImage(_names[i], LoadMode.Color))
                {
                    // create a window to display the results
                    Cv.NamedWindow(WindowName, WindowMode.AutoSize);

                    // find and draw the squares
                    DrawSquares(img, FindSquares4(img, storage));                    
                }

                // clear memory storage - reset free space position
                storage.Clear(); 

                // Wait for a key press; cvWaitKey also takes care of event processing
                int c = Cv.WaitKey(0);
                if ((char)c == 27)
                    break;
            }

            Cv.DestroyWindow(WindowName);
        }
Example #14
        public Undistort()
        {
            using (IplImage srcImg = new IplImage(FilePath.Image.Distortion, LoadMode.Color))
            using (IplImage dstImg = srcImg.Clone())
            {
                CvMat intrinsic, distortion;
                using (CvFileStorage fs = new CvFileStorage(FilePath.Text.Camera, null, FileStorageMode.Read))
                {
                    CvFileNode param = fs.GetFileNodeByName(null, "intrinsic");
                    intrinsic = fs.Read<CvMat>(param);
                    param = fs.GetFileNodeByName(null, "distortion");
                    distortion = fs.Read<CvMat>(param);
                }
                
                Cv.Undistort2(srcImg, dstImg, intrinsic, distortion);

                using (new CvWindow("Distortion", WindowMode.AutoSize, srcImg))
                using (new CvWindow("Undistortion", WindowMode.AutoSize, dstImg))
                {
                    CvWindow.WaitKey(0);
                }

                intrinsic.Dispose();
                distortion.Dispose();
            }
        }
Example #15
        public 描画画面()
        {
            InitializeComponent();

            dis_height = System.Windows.Forms.Screen.PrimaryScreen.Bounds.Height;
            dis_width = System.Windows.Forms.Screen.PrimaryScreen.Bounds.Width;
            pos_max = Tobii.pos_max;
            while (Tobii.眼球位置_L[0] == 0 || Tobii.眼球位置_R[0] == 100) { } // block here until both eyes are captured
            diff_in = Tobii.眼球位置_R[0] - Tobii.眼球位置_L[0];
            posY_in = (Tobii.眼球位置_L[1] + Tobii.眼球位置_R[1]) / 2;

            pictureBoxIpl1.Width = dis_width;
            pictureBoxIpl1.Height = dis_height;
            frame = Cv.CreateImage(new CvSize(dis_width, dis_height), BitDepth.U8, 3);
            background = Cv.CreateImage(new CvSize(dis_width, dis_height), BitDepth.U8, 3);
            background = メイン画面.background; // note: this discards the image allocated on the previous line
            pictureBoxIpl1.ImageIpl = background;
            window_size = new CvSize(メイン画面.window[0], メイン画面.window[1]);
            point_old = new CvPoint(window_size.Width / 2, window_size.Height / 2);
            許容半径 = メイン画面.radius;

            PC = new System.Diagnostics.PerformanceCounter[3];

            タイマー開始();
        }
Example #16
 private static void ShowCvWindow(IplImage image)
 {
     Cv.NamedWindow("window");
     Cv.ShowImage("window", image);
     Cv.WaitKey();
     Cv.DestroyWindow("window");
 }
Example #17
        public Undistort()
        {
            // cvUndistort2
            // Correct lens distortion using calibration data

            // (1) Load the image to be corrected
            using (IplImage srcImg = new IplImage(Const.ImageDistortion, LoadMode.Color))
            using (IplImage dstImg = srcImg.Clone())
            {

                // (2) Load the camera parameter file
                CvMat intrinsic, distortion;
                using (CvFileStorage fs = new CvFileStorage(Const.XmlCamera, null, FileStorageMode.Read))
                {
                    CvFileNode param = fs.GetFileNodeByName(null, "intrinsic");
                    intrinsic = fs.Read<CvMat>(param);
                    param = fs.GetFileNodeByName(null, "distortion");
                    distortion = fs.Read<CvMat>(param);
                }

                // (3) Correct the distortion
                Cv.Undistort2(srcImg, dstImg, intrinsic, distortion);

                // (4) Show the images; exit when a key is pressed
                using (CvWindow w1 = new CvWindow("Distortion", WindowMode.AutoSize, srcImg))
                using (CvWindow w2 = new CvWindow("Undistortion", WindowMode.AutoSize, dstImg))
                {
                    CvWindow.WaitKey(0);
                }

                intrinsic.Dispose();
                distortion.Dispose();
            }
        }
Example #18
        public Filter2D()
        {
            // cvFilter2D
            // Filtering with a user-defined kernel

            // (1) Load the image
            using (IplImage srcImg = new IplImage(Const.ImageFruits, LoadMode.AnyDepth | LoadMode.AnyColor))
            using (IplImage dstImg = new IplImage(srcImg.Size, srcImg.Depth, srcImg.NChannels))
            {
                // (2) Normalize the kernel and apply the filter
                float[] data = {    2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
                                    1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1
                };
                CvMat kernel = new CvMat(1, 21, MatrixType.F32C1, data);

                Cv.Normalize(kernel, kernel, 1.0, 0, NormType.L1);
                Cv.Filter2D(srcImg, dstImg, kernel, new CvPoint(0, 0));

                // (3) Show the result
                using (CvWindow window = new CvWindow("Filter2D", dstImg))
                {
                    Cv.WaitKey(0);
                }
            }

        }
Example #19
        public static OpenCvSharp.IplImage GetSub(this OpenCvSharp.IplImage ipl, OpenCvSharp.CvRect subRect)
        {
            if (ipl == null)
                throw new ArgumentNullException("ipl", "ipl is null.");

            var boundingRect = new CvRect(0, 0, ipl.Width, ipl.Height);

            if (!boundingRect.Contains(subRect))
                throw new InvalidOperationException("subRect is outside of ipl");

            try
            {
                ipl.SetROI(subRect);

                OpenCvSharp.IplImage sub = new IplImage(
                    ipl.GetSize(),
                    ipl.Depth,
                    ipl.NChannels);

                ipl.Copy(sub);
                return sub;
            }
            finally
            {
                ipl.ResetROI();
            }
        }
Example #20
 public Snake()
 {
     using (IplImage src = new IplImage(Const.ImageCake, LoadMode.GrayScale))
     using (IplImage dst = new IplImage(src.Size, BitDepth.U8, 3))
     {
         CvPoint[] contour = new CvPoint[100];
         CvPoint center = new CvPoint(src.Width / 2, src.Height / 2);
         for (int i = 0; i < contour.Length; i++)
         {
             contour[i].X = (int)(center.X * Math.Cos(2 * Math.PI * i / contour.Length) + center.X);
             contour[i].Y = (int)(center.Y * Math.Sin(2 * Math.PI * i / contour.Length) + center.Y);
         }
         Console.WriteLine("Press any key to snake\nEsc - quit");
         using (CvWindow w = new CvWindow())
         {
             while (true)
             {
                 src.SnakeImage(contour, 0.45f, 0.35f, 0.2f, new CvSize(15, 15), new CvTermCriteria(1), true);
                 src.CvtColor(dst, ColorConversion.GrayToRgb);
                 for (int i = 0; i < contour.Length - 1; i++)
                 {
                     dst.Line(contour[i], contour[i + 1], new CvColor(255, 0, 0), 2);
                 }
                 dst.Line(contour[contour.Length - 1], contour[0], new CvColor(255, 0, 0), 2);
                 w.Image = dst;
                 int key = CvWindow.WaitKey();
                 if (key == 27)
                 {
                     break;
                 }
             }
         }
     }
 }
Example #21
        public CornerDetect()
        {
            // cvGoodFeaturesToTrack, cvFindCornerSubPix
            // Detect corners (feature points) in an image

            int cornerCount = 150;

            using (IplImage dstImg1 = new IplImage(Const.ImageLenna, LoadMode.AnyColor | LoadMode.AnyDepth))
            using (IplImage dstImg2 = dstImg1.Clone())
            using (IplImage srcImgGray = new IplImage(Const.ImageLenna, LoadMode.GrayScale))
            using (IplImage eigImg = new IplImage(srcImgGray.GetSize(), BitDepth.F32, 1))
            using (IplImage tempImg = new IplImage(srcImgGray.GetSize(), BitDepth.F32, 1))
            {
                CvPoint2D32f[] corners;
                // (1) Corner detection using cvCornerMinEigenVal
                Cv.GoodFeaturesToTrack(srcImgGray, eigImg, tempImg, out corners, ref cornerCount, 0.1, 15);
                Cv.FindCornerSubPix(srcImgGray, corners, cornerCount, new CvSize(3, 3), new CvSize(-1, -1), new CvTermCriteria(20, 0.03));
                // (2) Draw the corners
                for (int i = 0; i < cornerCount; i++)
                    Cv.Circle(dstImg1, corners[i], 3, new CvColor(255, 0, 0), 2);
                // (3) Corner detection using cvCornerHarris
                cornerCount = 150;
                Cv.GoodFeaturesToTrack(srcImgGray, eigImg, tempImg, out corners, ref cornerCount, 0.1, 15, null, 3, true, 0.01);
                Cv.FindCornerSubPix(srcImgGray, corners, cornerCount, new CvSize(3, 3), new CvSize(-1, -1), new CvTermCriteria(20, 0.03));
                // (4) Draw the corners
                for (int i = 0; i < cornerCount; i++)
                    Cv.Circle(dstImg2, corners[i], 3, new CvColor(0, 0, 255), 2);
                // (5) Show the images
                using (new CvWindow("EigenVal", WindowMode.AutoSize, dstImg1)) 
                using (new CvWindow("Harris", WindowMode.AutoSize, dstImg2))
                {
                    Cv.WaitKey(0);
                }
            }
        }
Example #22
        public ActionResult Canny(HttpPostedFileBase imageData)
        {
            if (imageData != null)
            {
                using (var image = IplImage.FromStream(imageData.InputStream, LoadMode.Color))
                {
                    using (var grayImage = new IplImage(image.Size, BitDepth.U8, 1))
                    using (var cannyImage = new IplImage(image.Size, BitDepth.U8, 1))
                    {
                        Cv.CvtColor(image, grayImage, ColorConversion.BgrToGray);
                        Cv.Canny(grayImage, cannyImage, 60, 180);

                        byte[] cannyBytes = cannyImage.ToBytes(".png");
                        string base64 = Convert.ToBase64String(cannyBytes);
                        ViewBag.Base64Image = base64;

                        byte[] originalBytes = image.ToBytes(".png");
                        string base64Org = Convert.ToBase64String(originalBytes);
                        ViewBag.Base64OrgImage = base64Org;

                        byte[] grayBytes = grayImage.ToBytes(".png");
                        string base64Gray = Convert.ToBase64String(grayBytes);
                        ViewBag.Base64GrayImage = base64Gray;
                    }
                }
            }

            return View();
        }
Example #23
        public ConvertToWriteableBitmap()
        {
            WriteableBitmap wb = null;

            // Image processing with OpenCV (thresholding)
            using (IplImage src = new IplImage(Const.ImageLenna, LoadMode.GrayScale))
            using (IplImage dst = new IplImage(src.Size, BitDepth.U8, 1))
            {
                src.Smooth(src, SmoothType.Gaussian, 5);
                src.Threshold(dst, 0, 255, ThresholdType.Otsu);
                // IplImage -> WriteableBitmap
                wb = dst.ToWriteableBitmap(PixelFormats.BlackWhite);
                //wb = WriteableBitmapConverter.ToWriteableBitmap(dst, PixelFormats.BlackWhite);
            }

            // Show the result in a WPF Window
            Image image = new Image { Source = wb };
            Window window = new Window
            {
                Title = "from IplImage to WriteableBitmap",
                Width = wb.PixelWidth,
                Height = wb.PixelHeight,
                Content = image
            };

            Application app = new Application();
            app.Run(window);
        }
Example #24
        public void PreProcess()
        {
            //Cv.NamedWindow("anhthoai", WindowMode.AutoSize);
            IplConvKernel element = Cv.CreateStructuringElementEx(21, 3, 10, 2, ElementShape.Rect, null);
            timg = new IplImage(src.Size, BitDepth.U8, 1);
            IplImage temp = timg.Clone();
            IplImage dest = timg.Clone();
            src.CvtColor(timg, ColorConversion.RgbaToGray);
            pimg = timg.Clone();
            //Cv.Threshold(pimg, pimg, 128, 255, ThresholdType.Binary | ThresholdType.Otsu);
            Cv.Smooth(timg, timg, SmoothType.Gaussian);
            Cv.MorphologyEx(timg, dest, temp, element, MorphologyOperation.TopHat, 1);

            Cv.Threshold(dest, timg, 180, 255, ThresholdType.Binary | ThresholdType.Otsu);
            //Cv.AdaptiveThreshold(dest, timg, 255, AdaptiveThresholdType.MeanC, ThresholdType.Binary,75, 0);
            Cv.Smooth(timg, dest, SmoothType.Median);
            Cv.Dilate(dest, dest, element, 2);

            /*using (CvWindow window = new CvWindow("BoundingRect", WindowMode.AutoSize))
            {
                window.Image = dest;
                CvWindow.WaitKey(0);
            }*/
            //Cv.ShowImage("anhthoai", dest);
            Cv.ReleaseImage(temp);
            Cv.ReleaseImage(dest);
        }
Example #25
        public DrawToHdc()
        {
            CvRect roi = new CvRect(320, 260, 100, 100);        // region of Roosevelt's face

            using (IplImage src = new IplImage(Const.ImageYalta, LoadMode.Color))
            using (IplImage dst = new IplImage(roi.Size, BitDepth.U8, 3))
            {
                src.ROI = roi;

                using (Bitmap bitmap = new Bitmap(roi.Width, roi.Height, PixelFormat.Format32bppArgb))
                using (Graphics g = Graphics.FromImage(bitmap))
                {
                    //BitmapConverter.DrawToGraphics(src, g, new CvRect(new CvPoint(0, 0), roi.Size));
                    IntPtr hdc = g.GetHdc();
                    BitmapConverter.DrawToHdc(src, hdc, new CvRect(new CvPoint(0,0), roi.Size));
                    g.ReleaseHdc(hdc);

                    g.DrawString("Roosevelt", new Font(FontFamily.GenericSerif, 12), Brushes.Red, 20, 0);
                    g.DrawEllipse(new Pen(Color.Red, 4), new Rectangle(20, 20, roi.Width/2, roi.Height/2));

                    dst.CopyFrom(bitmap);
                }

                src.ResetROI();                

                using (new CvWindow("src", src))
                using (new CvWindow("dst", dst))
                {
                    Cv.WaitKey();
                }
            }
        }
Example #26
        public ConvertToBitmapSource()
        {
            BitmapSource bs = null;

            // Image processing with OpenCV (thresholding)
            using (IplImage src = new IplImage(Const.ImageLenna, LoadMode.GrayScale))
            using (IplImage dst = new IplImage(src.Size, BitDepth.U8, 1))
            {
                src.Smooth(src, SmoothType.Gaussian, 5);
                src.Threshold(dst, 0, 255, ThresholdType.Otsu);
                // IplImage -> BitmapSource
                bs = dst.ToBitmapSource();
                //bs = BitmapSourceConverter.ToBitmapSource(dst);
            }

            // Show the result in a WPF Window
            Image image = new Image { Source = bs };
            Window window = new Window
            {
                Title = "from IplImage to BitmapSource",
                Width = bs.PixelWidth,
                Height = bs.PixelHeight,
                Content = image
            };

            Application app = new Application();
            app.Run(window);
        }
Example #27
        public Serialization()
        {
            const string FileName = "serialization.dat";

            IplImage imgWrite = new IplImage(Const.ImageFruits, LoadMode.Color);
            IplImage imgRead;

            using (FileStream fs = new FileStream(FileName, FileMode.Create))
            {                
                BinaryFormatter bf = new BinaryFormatter();                
                bf.Serialize(fs, imgWrite);                
            }

            FileInfo info = new FileInfo(FileName);
            Console.WriteLine("{0} filesize:{1}bytes", info.Name, info.Length);
            
            using (FileStream fs = new FileStream(FileName, FileMode.Open))
            {
                BinaryFormatter bf = new BinaryFormatter();
                imgRead = (IplImage)bf.Deserialize(fs);                
            }

            Console.WriteLine("Source:      width:{0} height:{1} depth:{2} channels:{3} imagesize:{4}",
                    imgWrite.Width, imgWrite.Height, imgWrite.Depth, imgWrite.NChannels, imgWrite.ImageSize);
            Console.WriteLine("Deserialize: width:{0} height:{1} depth:{2} channels:{3} imagesize:{4}",
                    imgRead.Width, imgRead.Height, imgRead.Depth, imgRead.NChannels, imgRead.ImageSize);            

            using (new CvWindow("Source Image", imgWrite))
            using (new CvWindow("Deserialized Image", imgRead))
            {
                Cv.WaitKey();
            }
        }
Example #28
        public SaveImage()
        {
            using (IplImage img = new IplImage(Const.Image16bit, LoadMode.Color))
            {
                // JPEG quality test
                img.SaveImage("q000.jpg", new JpegEncodingParam(0));
                img.SaveImage("q025.jpg", new JpegEncodingParam(25));
                img.SaveImage("q050.jpg", new JpegEncodingParam(50));
                img.SaveImage("q075.jpg", new JpegEncodingParam(75));
                img.SaveImage("q100.jpg", new JpegEncodingParam(100));

                using (IplImage q000 = new IplImage("q000.jpg", LoadMode.Color))
                using (IplImage q025 = new IplImage("q025.jpg", LoadMode.Color))
                using (IplImage q050 = new IplImage("q050.jpg", LoadMode.Color))
                using (IplImage q075 = new IplImage("q075.jpg", LoadMode.Color))
                using (IplImage q100 = new IplImage("q100.jpg", LoadMode.Color))
                using (CvWindow w000 = new CvWindow("quality 0", q000))
                using (CvWindow w025 = new CvWindow("quality 25", q025))
                using (CvWindow w050 = new CvWindow("quality 50", q050))
                using (CvWindow w075 = new CvWindow("quality 75", q075))
                using (CvWindow w100 = new CvWindow("quality 100", q100))
                {
                    Cv.WaitKey();
                }
            }
        }
Example #29
        public IplImage Init(string fileName1)
        {
            _Obj = Cv.LoadImage(fileName1, LoadMode.GrayScale);
            _ObjColor = Cv.CreateImage(_Obj.Size, BitDepth.U8, 3);

            using (CvMemStorage storage = Cv.CreateMemStorage(0))
            {
                Cv.CvtColor(_Obj, _ObjColor, ColorConversion.GrayToBgr);

                Stopwatch watch = Stopwatch.StartNew();
                {
                    CvSURFParams param = new CvSURFParams(500, true);
                    Cv.ExtractSURF(_Obj, null, out _ObjectKeypoints, out _ObjectDescriptors, storage, param);
                    Console.WriteLine("Object Descriptors: {0}", _ObjectDescriptors.Total);
                }
                watch.Stop();
            }

            return _Obj;
        }
Example #30
        public Resize()
        {
            // cvResize
            // Resize the input image to fit the specified output image size

            // (1) Load the image
            using (IplImage src = new IplImage(Const.ImageSquare5, LoadMode.AnyColor | LoadMode.AnyDepth))
            {
                // (2) Allocate the output images
                CvSize size = new CvSize(src.Width * 2, src.Height * 2);
                using (IplImage dstNN = new IplImage(size, src.Depth, src.NChannels))
                using (IplImage dstCubic = new IplImage(size, src.Depth, src.NChannels))
                using (IplImage dstLinear = new IplImage(size, src.Depth, src.NChannels))
                using (IplImage dstLanczos = new IplImage(size, src.Depth, src.NChannels))                
                {
                    // (3) Resize the image
                    Cv.Resize(src, dstNN, Interpolation.NearestNeighbor);
                    Cv.Resize(src, dstCubic, Interpolation.Cubic);                    
                    Cv.Resize(src, dstLinear, Interpolation.Linear);
                    Cv.Resize(src, dstLanczos, Interpolation.Lanczos4);

                    // (4) Show the results
                    using (new CvWindow("src", src))
                    using (new CvWindow("dst NearestNeighbor", dstNN))
                    using (new CvWindow("dst Cubic", dstCubic))
                    using (new CvWindow("dst Linear", dstLinear))
                    using (new CvWindow("dst Lanczos4", dstLanczos))                    
                    {
                        Cv.WaitKey();
                    }
                }
            }
        }
Example #31
        public FitLine()
        {
            CvSize imageSize = new CvSize(500, 500);

            // cvFitLine
            CvPoint2D32f[] points = GetRandomPoints(20, imageSize);
            CvLine2D line = Cv.FitLine2D(points, DistanceType.L2, 0, 0.01, 0.01);

            using (IplImage img = new IplImage(imageSize, BitDepth.U8, 3))
            {
                img.Zero();

                // draw line
                {
                    CvPoint pt1, pt2;
                    line.FitSize(img.Width, img.Height, out pt1, out pt2);
                    img.Line(pt1, pt2, CvColor.Green, 1, LineType.Link8);
                }

                // draw points and distances
                using (CvFont font = new CvFont(FontFace.HersheySimplex, 0.33, 0.33))
                {
                    foreach (CvPoint2D32f p in points)
                    {
                        double d = line.Distance(p);

                        img.Circle(p, 2, CvColor.White, -1, LineType.AntiAlias);
                        img.PutText(string.Format("{0:F1}", d), new CvPoint((int) (p.X + 3), (int) (p.Y + 3)), font, CvColor.Green);
                    }
                }

                CvWindow.ShowImages(img);
            }
        }
Example #32
        /// <summary>
        /// cvSeqPartition sample: clusters randomly generated points by a distance threshold
        /// </summary>
        public SeqPartition()
        {
            CvMemStorage storage = new CvMemStorage(0);
            pointSeq = new CvSeq<CvPoint>(SeqType.EltypeS32C2, CvSeq.SizeOf, storage);
            Random rand = new Random();
            canvas = new IplImage(Width, Height, BitDepth.U8, 3);

            colors = new CvScalar[Count];
            for (int i = 0; i < Count; i++)
            {
                CvPoint pt = new CvPoint
                {
                    X = rand.Next(Width),
                    Y = rand.Next(Height)
                };
                pointSeq.Push(pt);
                int icolor = rand.Next() | 0x00404040;
                colors[i] = Cv.RGB(icolor & 255, (icolor >> 8) & 255, (icolor >> 16) & 255);
            }

            using (window = new CvWindowEx() { Text = "points" })
            {
                window.CreateTrackbar("threshold", 10, 50, OnTrack);
                OnTrack(10);
                CvWindowEx.WaitKey();
            }
        }
Example #33
 public IplImage GetIpl()
 {
     if (_ipl == null)
     {
         _ipl     = IplImage.FromFile(ImageFilePath);
         _ipl.ROI = FaceRect;
     }
      return _ipl;
 }
Example #34
        public static bool PreProcessFrame(Frame frame, ref Frame lastFrame)
        {
            OpenCvSharp.IplImage ipl = new OpenCvSharp.IplImage(frame.image);
            // Do not let OpenCvSharp release the underlying image data; it is owned by the frame
            ipl.IsEnabledDispose = false;

            lastFrame            = frame;
            lastFrame.searchRect = new CvRect(0, 0, ipl.Width, ipl.Height);

            return true;
        }
Example #35
 public Frame(OpenCvSharp.IplImage ipl)
 {
     this.iplImage = ipl;
     this.InitializeFields();
 }
Example #36
 public PersonOfInterest(IplImage iplImage, IplImage alignedImage)
     : this()
 {
     _ipl          = iplImage;
     _alignedImage = alignedImage;
 }
Example #37
 /// <summary>
 /// Tracks faces in the given grayscale frame (wraps cvTrackFace)
 /// </summary>
 /// <param name="imgGray">grayscale input frame</param>
 /// <param name="pRects">face rectangles to track, updated by the tracker</param>
 /// <param name="ptRotate">output rotation center</param>
 /// <param name="dbAngleRotate">output rotation angle</param>
 /// <returns>true if the faces were tracked successfully</returns>
 public bool TrackFace(IplImage imgGray, CvRect[] pRects, out CvPoint ptRotate, out double dbAngleRotate)
 {
     return Cv.TrackFace(this, imgGray, pRects, out ptRotate, out dbAngleRotate);
 }
Example #38
 public void PyrSegmentation(IplImage dst, CvMemStorage storage, out CvSeq comp, int level, double threshold1,
                             double threshold2);
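A minimal usage sketch for this method (the file name, pyramid level, and thresholds are illustrative assumptions; cvPyrSegmentation requires the processed width and height to be divisible by 2^level, hence the ROI adjustment):

        public static void PyrSegmentationSample()
        {
            const int level = 4;
            using (IplImage src = new IplImage("fruits.png", LoadMode.Color))
            {
                // Shrink the ROI to the nearest width/height that 2^level divides
                src.ROI = new CvRect(0, 0, src.Width & -(1 << level), src.Height & -(1 << level));
                using (IplImage dst = src.Clone())   // clone keeps the same size and ROI
                using (CvMemStorage storage = new CvMemStorage())
                {
                    CvSeq comp;
                    src.PyrSegmentation(dst, storage, out comp, level, 255.0, 30.0);
                    Console.WriteLine("connected components: {0}", comp.Total);
                }
                src.ResetROI();
            }
        }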
Example #39
        private void btnOk_Click(object sender, EventArgs e)
        {
            if (this.picTargetPerson.Image == null)
            {
                MessageBox.Show("请选定一张人脸图片"); // "Please select a face image"
                return;
            }

            if (drawRectangle == Rectangle.Empty)
            {
                MessageBox.Show("请定位人脸"); // "Please locate the face"
                return;
            }


            String oldFileName = this.picTargetPerson.Image.Tag as string;

            String fileName = System.Guid.NewGuid().ToString().ToUpper() + System.IO.Path.GetExtension(oldFileName);

            // Search for the face
            OpenCvSharp.IplImage iplFace = BitmapConverter.ToIplImage((Bitmap)this.picTargetPerson.Image);

            string savePath = Path.Combine(FileSavePath, fileName);

            iplFace.SaveImage(savePath);

            // Normalize
            OpenCvSharp.CvRect rect = new OpenCvSharp.CvRect(
                this.drawRectangle.X,
                this.drawRectangle.Y,
                this.drawRectangle.Width,
                this.drawRectangle.Height);

            OpenCvSharp.IplImage[] normalizedImages =
                Program.searcher.NormalizeImageForTraining(iplFace, rect);

            for (int i = 0; i < normalizedImages.Length; ++i)
            {
                string normalizedFaceName = string.Format("{0}_{1:d4}.jpg",
                                                          System.IO.Path.GetFileNameWithoutExtension(fileName), i);

                string fullPath = System.IO.Path.Combine(faceFeatureImagePath,
                                                         normalizedFaceName);

                normalizedImages[i].SaveImage(fullPath);
            }

            string id   = txtId.Text.ToString();
            string name = txtName.Text.ToString();
            string sex  = rabMan.Checked ? "男" : "女"; // "male" : "female"
            int    age  = 0;

            int.TryParse(txtAge.Text, out age);

            string card = txtCard.Text.ToString();

            PersonInfo info = new PersonInfo();

            info.ID         = id;
            info.Name       = name;
            info.Sex        = sex;
            info.Age        = age;
            info.CardId     = card;
            info.FileName   = fileName;
            info.Similarity = 0;

            perinfo.WriteInfo(info);

            MessageBox.Show("添加成功"); // "Added successfully"

            Array.ForEach(normalizedImages, ipl => ipl.Dispose());
        }
Example #40
        /// <summary>
        /// find rectangular regions in the given image that are likely
        /// to contain objects and corresponding confidence levels
        /// </summary>
        /// <param name="image">image to detect objects in</param>
        /// <param name="storage">memory storage to store the resultant sequence of the object candidate rectangles</param>
        /// <returns></returns>
        public CvSeq<CvObjectDetection> DetectObjects(IplImage image, CvMemStorage storage)
        {
            return Cv.LatentSvmDetectObjects(image, this, storage);
        }
Example #41
 public void PyrSegmentation(IplImage dst, int level, double threshold1, double threshold2);
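The usage sketch under Example #38 applies to this overload as well; it simply omits the storage and comp arguments.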
Example #42
        /// <summary>
        /// Writes/appends one frame to video file.
        /// </summary>
        /// <param name="image">the written frame.</param>
        /// <returns></returns>
        public int WriteFrame(IplImage image)
        {
            return Cv.WriteFrame(this, image);
        }
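A minimal usage sketch (the file name, codec, frame rate, frame size, and drawn content are assumptions; FourCC.XVID may need to be replaced by a codec installed on the target machine):

        public static void WriteFrameSample()
        {
            CvSize size = new CvSize(640, 480);
            using (CvVideoWriter writer = new CvVideoWriter("output.avi", FourCC.XVID, 30, size))
            using (IplImage frame = new IplImage(size, BitDepth.U8, 3))
            {
                for (int i = 0; i < 100; i++)
                {
                    // Draw a filled circle moving across a black frame
                    frame.Zero();
                    frame.Circle(new CvPoint(i * 6, size.Height / 2), 50, CvColor.Red, -1);
                    writer.WriteFrame(frame); // appends one frame to the video file
                }
            }
        }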
Example #43
        /// <summary>
        /// find rectangular regions in the given image that are likely
        /// to contain objects and corresponding confidence levels
        /// </summary>
        /// <param name="image">image to detect objects in</param>
        /// <param name="storage">memory storage to store the resultant sequence of the object candidate rectangles</param>
        /// <param name="overlap_threshold">threshold for the non-maximum suppression algorithm
        ///  = 0.5f [here will be the reference to original paper]</param>
        /// <param name="numThreads">number of threads used in the parallel version of the algorithm</param>
        /// <returns></returns>
        public CvSeq<CvObjectDetection> DetectObjects(IplImage image, CvMemStorage storage, float overlap_threshold, int numThreads)
        {
            return Cv.LatentSvmDetectObjects(image, this, storage, overlap_threshold, numThreads);
        }
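Example #1 above shows this overload in context, calling it with overlap_threshold = 0.5f and numThreads = 2.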