Example #1
        public FitLine()
        {
            CvSize imageSize = new CvSize(500, 500);

            // cvFitLine
            CvPoint2D32f[] points = GetRandomPoints(20, imageSize);
            CvLine2D line = Cv.FitLine2D(points, DistanceType.L2, 0, 0.01, 0.01);

            using (IplImage img = new IplImage(imageSize, BitDepth.U8, 3))
            {
                img.Zero();

                // draw line
                {
                    CvPoint pt1, pt2;
                    line.FitSize(img.Width, img.Height, out pt1, out pt2);
                    img.Line(pt1, pt2, CvColor.Green, 1, LineType.Link8);
                }

                // draw points and distances
                using (CvFont font = new CvFont(FontFace.HersheySimplex, 0.33, 0.33))
                {
                    foreach (CvPoint2D32f p in points)
                    {
                        double d = line.Distance(p);

                        img.Circle(p, 2, CvColor.White, -1, LineType.AntiAlias);
                        img.PutText(string.Format("{0:F1}", d), new CvPoint((int) (p.X + 3), (int) (p.Y + 3)), font, CvColor.Green);
                    }
                }

                CvWindow.ShowImages(img);
            }
        }
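The FitLine example above calls a GetRandomPoints helper that is not included in the snippet. Below is a minimal sketch of what such a helper might look like; the diagonal-plus-noise point distribution is an assumption for illustration, not the original implementation.

        private static CvPoint2D32f[] GetRandomPoints(int count, CvSize imageSize)
        {
            // Scatter points roughly along the image diagonal with +-20 px of vertical noise,
            // so that cvFitLine has a meaningful line to recover.
            CvRNG rng = new CvRNG((ulong)DateTime.Now.Ticks);
            CvPoint2D32f[] points = new CvPoint2D32f[count];
            for (int i = 0; i < count; i++)
            {
                int x = (int)(rng.RandInt() % (uint)imageSize.Width);
                int noise = (int)(rng.RandInt() % 41u) - 20;
                int y = x * imageSize.Height / imageSize.Width + noise;
                points[i] = new CvPoint2D32f(x, y);
            }
            return points;
        }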
Example #2
        public Contour()
        {
            // cvContourArea, cvArcLength
            // Compute the area of the region bounded by the contour and the contour's arc length
            
            const int SIZE = 500;

            // (1) Allocate and initialize the image
            using (CvMemStorage storage = new CvMemStorage())
            using (IplImage img = new IplImage(SIZE, SIZE, BitDepth.U8, 3))
            {
                img.Zero();
                // (2) Generate the point sequence
                CvSeq<CvPoint> points = new CvSeq<CvPoint>(SeqType.PolyLine, storage);
                CvRNG rng = new CvRNG((ulong)DateTime.Now.Ticks);
                double scale = rng.RandReal() + 0.5;
                CvPoint pt0 = new CvPoint
                {
                    X = (int)(Math.Cos(0) * SIZE / 4 * scale + SIZE / 2),
                    Y = (int)(Math.Sin(0) * SIZE / 4 * scale + SIZE / 2)
                };
                img.Circle(pt0, 2, CvColor.Green);
                points.Push(pt0);
                for (int i = 1; i < 20; i++)
                {
                    scale = rng.RandReal() + 0.5;
                    CvPoint pt1 = new CvPoint
                    {
                        X = (int)(Math.Cos(i * 2 * Math.PI / 20) * SIZE / 4 * scale + SIZE / 2),
                        Y = (int)(Math.Sin(i * 2 * Math.PI / 20) * SIZE / 4 * scale + SIZE / 2)
                    };
                    img.Line(pt0, pt1, CvColor.Green, 2);
                    pt0.X = pt1.X;
                    pt0.Y = pt1.Y;
                    img.Circle(pt0, 3, CvColor.Green, Cv.FILLED);
                    points.Push(pt0);
                }
                img.Line(pt0, points.GetSeqElem(0).Value, CvColor.Green, 2);
                // (3) Compute the bounding rectangle, area, and arc length
                CvRect rect = points.BoundingRect(false);
                double area = points.ContourArea();
                double length = points.ArcLength(CvSlice.WholeSeq, 1);
                // (4) Draw the results onto the image
                img.Rectangle(new CvPoint(rect.X, rect.Y), new CvPoint(rect.X + rect.Width, rect.Y + rect.Height), CvColor.Red, 2);
                string text_area = string.Format("Area:   rect={0}, contour={1}", rect.Width * rect.Height, area);
                string text_length = string.Format("Length: rect={0}, contour={1}", 2 * (rect.Width + rect.Height), length);
                using (CvFont font = new CvFont(FontFace.HersheySimplex, 0.7, 0.7, 0, 1, LineType.AntiAlias))
                {
                    img.PutText(text_area, new CvPoint(10, img.Height - 30), font, CvColor.White);
                    img.PutText(text_length, new CvPoint(10, img.Height - 10), font, CvColor.White);
                }
                // (5) Show the image and exit when a key is pressed
                using (CvWindow window = new CvWindow("BoundingRect", WindowMode.AutoSize))
                {
                    window.Image = img;
                    CvWindow.WaitKey(0);
                }
            }
        }
Example #3
        public Watershed()
        {
            // cvWatershed
            // Use the mouse to specify the centers of circular markers (seed regions) and set any number of markers.
            // The markers are then grown along the image gradient, and the image is segmented at the boundaries that form where the gradient is high.
            // The image is divided into as many regions as the number of markers specified.

            // (2) Load the image, initialize the marker image, and allocate the images used to display the results
            using (IplImage srcImg = new IplImage(Const.ImageGoryokaku, LoadMode.AnyDepth | LoadMode.AnyColor))
            using (IplImage dstImg = srcImg.Clone())
            using (IplImage dspImg = srcImg.Clone())
            using (IplImage markers = new IplImage(srcImg.Size, BitDepth.S32, 1))
            {
                markers.Zero();

                // (3) Show the input image and register a mouse callback for specifying seed components
                using (CvWindow wImage = new CvWindow("image", WindowMode.AutoSize))
                {
                    wImage.Image = srcImg;
                    // Specify a center by clicking to place a circular seed region
                    int seedNum = 0;
                    wImage.OnMouseCallback += delegate(MouseEvent ev, int x, int y, MouseEvent flags)
                    {
                        if (ev == MouseEvent.LButtonDown)
                        {
                            seedNum++;
                            CvPoint pt = new CvPoint(x, y);
                            markers.Circle(pt, 20, CvScalar.ScalarAll(seedNum), Cv.FILLED, LineType.Link8, 0);
                            dspImg.Circle(pt, 20, CvColor.White, 3, LineType.Link8, 0);
                            wImage.Image = dspImg;
                        }
                    };
                    CvWindow.WaitKey();
                }

                // (4) Run the watershed segmentation
                Cv.Watershed(srcImg, markers);

                // (5) Draw the watershed boundaries in the result (pixel value = -1) onto the display image
                for (int i = 0; i < markers.Height; i++)
                {
                    for (int j = 0; j < markers.Width; j++)
                    {
                        int idx = (int)(markers.Get2D(i, j).Val0);
                        if (idx == -1)
                        {
                            dstImg.Set2D(i, j, CvColor.Red);
                        }
                    }
                }
                using (CvWindow wDst = new CvWindow("watershed transform", WindowMode.AutoSize))
                {
                    wDst.Image = dstImg;
                    CvWindow.WaitKey();
                }
            }

        }
Example #4
        public Histogram()
        {
            // cvCalcHist
            // A sample that lets you vary the contrast and brightness

            const int histSize = 64;
            float[] range0 = { 0, 256 };
            float[][] ranges = { range0 };

            // Load the image
            using (IplImage srcImg = new IplImage(Const.ImageLenna, LoadMode.GrayScale))
            using (IplImage dstImg = srcImg.Clone())
            using (IplImage histImg = new IplImage(new CvSize(400, 400), BitDepth.U8, 1))
            using (CvHistogram hist = new CvHistogram(new int[] { histSize }, HistogramFormat.Array, ranges, true))
            {
                using (CvWindow windowImage = new CvWindow("image", WindowMode.AutoSize))
                using (CvWindow windowHist = new CvWindow("histogram", WindowMode.AutoSize))
                {
                    // Handler invoked when a trackbar is moved
                    CvTrackbar ctBrightness = null;
                    CvTrackbar ctContrast = null;
                    CvTrackbarCallback callback = delegate(int pos)
                    {
                        int brightness = ctBrightness.Pos - 100;
                        int contrast = ctContrast.Pos - 100;
                        // Apply the LUT
                        byte[] lut = CalcLut(contrast, brightness);
                        srcImg.LUT(dstImg, lut);
                        // Draw the histogram
                        CalcHist(dstImg, hist);
                        DrawHist(histImg, hist, histSize);
                        // Show in the windows
                        windowImage.ShowImage(dstImg);
                        windowHist.ShowImage(histImg);
                        dstImg.Zero();
                        histImg.Zero();
                    };

                    // Create the trackbars
                    // (In OpenCV, changes in a trackbar's position can be tracked by passing a pointer to the current position,
                    // but in .NET the GC can relocate that pointer, so that mechanism was dropped. Obtain the position another way, e.g. via CvTrackbar.Pos.)
                    ctBrightness = windowImage.CreateTrackbar("brightness", 100, 200, callback);
                    ctContrast = windowImage.CreateTrackbar("contrast", 100, 200, callback);
                    // Initial rendering
                    callback(0);

                    // Wait for a key press
                    Cv.WaitKey(0);
                }
            }
        }
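This Histogram example (and its English variant in Example #9) relies on three helpers that are not shown: CalcLut, CalcHist and DrawHist. The sketches below are one plausible implementation, assuming the 400x400 histImg allocated above and the brightness/contrast LUT formula from OpenCV's demhist sample; treat them as illustrations rather than the original code.

        private static byte[] CalcLut(int contrast, int brightness)
        {
            // Brightness/contrast lookup table in the style of OpenCV's demhist sample.
            byte[] lut = new byte[256];
            double a, b;
            if (contrast > 0)
            {
                double delta = 127.0 * contrast / 100.0;
                a = 255.0 / (255.0 - delta * 2);
                b = a * (brightness - delta);
            }
            else
            {
                double delta = -128.0 * contrast / 100.0;
                a = (256.0 - delta * 2) / 255.0;
                b = a * brightness + delta;
            }
            for (int i = 0; i < 256; i++)
            {
                int v = Cv.Round(a * i + b);
                lut[i] = (byte)Math.Max(0, Math.Min(255, v));
            }
            return lut;
        }

        private static void CalcHist(IplImage img, CvHistogram hist)
        {
            // Accumulate the histogram of the single-channel image, then scale the bins
            // so that the tallest bin fits the 400-pixel-high histogram image.
            hist.Calc(new IplImage[] { img }, false, null);
            float minValue, maxValue;
            hist.GetMinMaxValue(out minValue, out maxValue);
            Cv.ConvertScale(hist.Bins, hist.Bins, 400.0 / maxValue, 0);
        }

        private static void DrawHist(IplImage img, CvHistogram hist, int histSize)
        {
            // Draw each bin as a filled bar growing up from the bottom edge.
            int binW = Cv.Round((double)img.Width / histSize);
            for (int i = 0; i < histSize; i++)
            {
                int binH = Cv.Round(Cv.GetReal1D(hist.Bins, i));
                img.Rectangle(
                    new CvPoint(i * binW, img.Height),
                    new CvPoint((i + 1) * binW, img.Height - binH),
                    CvColor.White, Cv.FILLED);
            }
        }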
Example #5
        public Watershed()
        {
            using (var srcImg = new IplImage(FilePath.Image.Goryokaku, LoadMode.AnyDepth | LoadMode.AnyColor))
            using (var dstImg = srcImg.Clone())
            using (var dspImg = srcImg.Clone())
            using (var markers = new IplImage(srcImg.Size, BitDepth.S32, 1))
            {
                markers.Zero();

                using (var window = new CvWindow("image", WindowMode.AutoSize))
                {
                    window.Image = srcImg;
                    // Mouse event  
                    int seedNum = 0;
                    window.OnMouseCallback += delegate(MouseEvent ev, int x, int y, MouseEvent flags)
                    {
                        if (ev == MouseEvent.LButtonDown)
                        {
                            seedNum++;
                            CvPoint pt = new CvPoint(x, y);
                            markers.Circle(pt, 20, CvScalar.ScalarAll(seedNum), Cv.FILLED, LineType.Link8, 0);
                            dspImg.Circle(pt, 20, CvColor.White, 3, LineType.Link8, 0);
                            window.Image = dspImg;
                        }
                    };
                    CvWindow.WaitKey();
                }

                Cv.Watershed(srcImg, markers);

                // draws watershed
                for (int i = 0; i < markers.Height; i++)
                {
                    for (int j = 0; j < markers.Width; j++)
                    {
                        int idx = (int)(markers.Get2D(i, j).Val0);
                        if (idx == -1)
                        {
                            dstImg.Set2D(i, j, CvColor.Red);
                        }
                    }
                }
                using (CvWindow wDst = new CvWindow("watershed transform", WindowMode.AutoSize))
                {
                    wDst.Image = dstImg;
                    CvWindow.WaitKey();
                }
            }

        }
Example #6
 private static void FilterByMaximalBlob(IplImage imgSrc, IplImage imgDst)
 {
     using (CvBlobs blobs = new CvBlobs())
     using (IplImage imgLabelData = new IplImage(imgSrc.Size, CvBlobLib.DepthLabel, 1))
     {
         imgDst.Zero();
         blobs.Label(imgSrc, imgLabelData);
         CvBlob max = blobs[blobs.GreaterBlob()];
         if (max == null)
         {
             return;
         }
         blobs.FilterByArea(max.Area, max.Area);
         blobs.FilterLabels(imgLabelData, imgDst);
     }
 }
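A minimal usage sketch for the helper above; the Run method name and "input.png" path are placeholders. CvBlobs expects a binary, single-channel source image, so the input is thresholded first.

 private static void Run()
 {
     using (IplImage gray = new IplImage("input.png", LoadMode.GrayScale))
     using (IplImage bin = new IplImage(gray.Size, BitDepth.U8, 1))
     using (IplImage dst = new IplImage(gray.Size, BitDepth.U8, 1))
     {
         // Binarize with Otsu so the blob labeling gets a clean foreground/background split.
         Cv.Threshold(gray, bin, 0, 255, ThresholdType.Binary | ThresholdType.Otsu);
         FilterByMaximalBlob(bin, dst);
         CvWindow.ShowImages(dst);
     }
 }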
Example #7
        public BoundingRect()
        {
            // cvBoundingRect 
            // Find the rectangle that bounds a set of points

            // (1) Allocate and initialize the image and the memory storage
            // (the memory storage is unnecessary unless CvSeq is used)
            using (IplImage img = new IplImage(640, 480, BitDepth.U8, 3))
            using (CvMemStorage storage = new CvMemStorage(0))
            {
                img.Zero();
                CvRNG rng = new CvRNG(DateTime.Now);
                // (2) Generate the points
                ///*
                // The easy way (use a plain array)
                CvPoint[] points = new CvPoint[50];
                for (int i = 0; i < 50; i++)
                {
                    points[i] = new CvPoint()
                    {
                        X = (int)(rng.RandInt() % (img.Width / 2) + img.Width / 4),
                        Y = (int)(rng.RandInt() % (img.Height / 2) + img.Height / 4)
                    };
                    img.Circle(points[i], 3, new CvColor(0, 255, 0), Cv.FILLED);
                }
                //*/
                /*
                // The approach following the original sample (use CvSeq)
                CvSeq points = new CvSeq(SeqType.EltypePoint, CvSeq.SizeOf, CvPoint.SizeOf, storage);
                for (int i = 0; i < 50; i++) {
                    CvPoint pt = new CvPoint();
                    pt.X = (int)(rng.RandInt() % (img.Width / 2) + img.Width / 4);
                    pt.Y = (int)(rng.RandInt() % (img.Height / 2) + img.Height / 4);
                    points.Push(pt);
                    img.Circle(pt, 3, new CvColor(0, 255, 0), Cv.FILLED);
                }
                //*/
                // (3) Find and draw the rectangle bounding the points
                CvRect rect = Cv.BoundingRect(points);
                img.Rectangle(new CvPoint(rect.X, rect.Y), new CvPoint(rect.X + rect.Width, rect.Y + rect.Height), new CvColor(255, 0, 0), 2);
                // (4) Show the image and exit when a key is pressed
                using (CvWindow w = new CvWindow("BoundingRect", WindowMode.AutoSize, img))
                {
                    CvWindow.WaitKey(0);
                }
            }
        }
        /// <summary>
        /// Finds flesh-colored regions by back projection
        /// </summary>
        /// <param name="imgSrc"></param>
        /// <param name="hsvPlanes"></param>
        /// <param name="imgRender"></param>
        private void RetrieveFleshRegion(IplImage imgSrc, IplImage[] hsvPlanes, IplImage imgDst)
        {
            int[] histSize = new int[] {30, 32};
            float[] hRanges = {0.0f, 20f};
            float[] sRanges = {50f, 255f};
            float[][] ranges = {hRanges, sRanges};

            imgDst.Zero();
            using (CvHistogram hist = new CvHistogram(histSize, HistogramFormat.Array, ranges, true))
            {
                hist.Calc(hsvPlanes, false, null);
                float minValue, maxValue;
                hist.GetMinMaxValue(out minValue, out maxValue);
                hist.Normalize(imgSrc.Width * imgSrc.Height * 255 / maxValue);
                hist.CalcBackProject(hsvPlanes, imgDst);
            }
        }
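RetrieveFleshRegion expects hsvPlanes to hold the separate hue and saturation planes of the source image, since the histogram above is two-dimensional (hue x saturation). A minimal calling sketch follows; the DetectFleshRegion name is a placeholder, and imgDst is assumed to be a pre-allocated single-channel 8-bit image.

        private void DetectFleshRegion(IplImage imgSrc, IplImage imgDst)
        {
            using (IplImage imgHsv = new IplImage(imgSrc.Size, BitDepth.U8, 3))
            using (IplImage imgH = new IplImage(imgSrc.Size, BitDepth.U8, 1))
            using (IplImage imgS = new IplImage(imgSrc.Size, BitDepth.U8, 1))
            using (IplImage imgV = new IplImage(imgSrc.Size, BitDepth.U8, 1))
            {
                // Convert to HSV and split into planes; only H and S feed the 2D histogram.
                Cv.CvtColor(imgSrc, imgHsv, ColorConversion.BgrToHsv);
                Cv.Split(imgHsv, imgH, imgS, imgV, null);
                RetrieveFleshRegion(imgSrc, new IplImage[] { imgH, imgS }, imgDst);
            }
        }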
Example #9
        public Histogram()
        {
            // cvCalcHist

            const int histSize = 64;
            float[] range0 = { 0, 256 };
            float[][] ranges = { range0 };

            using (IplImage srcImg = new IplImage(FilePath.Image.Lenna, LoadMode.GrayScale))
            using (IplImage dstImg = srcImg.Clone())
            using (IplImage histImg = new IplImage(new CvSize(400, 400), BitDepth.U8, 1))
            using (CvHistogram hist = new CvHistogram(new int[] { histSize }, HistogramFormat.Array, ranges, true))
            {
                using (CvWindow windowImage = new CvWindow("image", WindowMode.AutoSize))
                using (CvWindow windowHist = new CvWindow("histogram", WindowMode.AutoSize))
                {
                    CvTrackbar ctBrightness = null;
                    CvTrackbar ctContrast = null;
                    CvTrackbarCallback callback = delegate(int pos)
                    {
                        int brightness = ctBrightness.Pos - 100;
                        int contrast = ctContrast.Pos - 100;
                        // perform LUT
                        byte[] lut = CalcLut(contrast, brightness);
                        srcImg.LUT(dstImg, lut);
                        // draws histogram
                        CalcHist(dstImg, hist);
                        DrawHist(histImg, hist, histSize);

                        windowImage.ShowImage(dstImg);
                        windowHist.ShowImage(histImg);
                        dstImg.Zero();
                        histImg.Zero();
                    };

                    ctBrightness = windowImage.CreateTrackbar("brightness", 100, 200, callback);
                    ctContrast = windowImage.CreateTrackbar("contrast", 100, 200, callback);
                    // initial action
                    callback(0);

                    Cv.WaitKey(0);
                }
            }
        }
Example #10
        public SVM()
        {
            // CvSVM
            // Solve a three-class classification problem on 2D vectors with an SVM

            const int S = 1000;
            const int SIZE = 400;
            CvRNG rng = new CvRNG((ulong)DateTime.Now.Ticks);

            // (1) Allocate and initialize the image
            using (IplImage img = new IplImage(SIZE, SIZE, BitDepth.U8, 3))
            {
                img.Zero();
                // (2) Generate the training data
                CvPoint[] pts = new CvPoint[S];
                int[] res = new int[S];
                for (int i = 0; i < S; i++)
                {
                    pts[i].X = (int)(rng.RandInt() % SIZE);
                    pts[i].Y = (int)(rng.RandInt() % SIZE);
                    if (pts[i].Y > 50 * Math.Cos(pts[i].X * Cv.PI / 100) + 200)
                    {
                        img.Line(new CvPoint(pts[i].X - 2, pts[i].Y - 2), new CvPoint(pts[i].X + 2, pts[i].Y + 2), new CvColor(255, 0, 0));
                        img.Line(new CvPoint(pts[i].X + 2, pts[i].Y - 2), new CvPoint(pts[i].X - 2, pts[i].Y + 2), new CvColor(255, 0, 0));
                        res[i] = 1;
                    }
                    else
                    {
                        if (pts[i].X > 200)
                        {
                            img.Line(new CvPoint(pts[i].X - 2, pts[i].Y - 2), new CvPoint(pts[i].X + 2, pts[i].Y + 2), new CvColor(0, 255, 0));
                            img.Line(new CvPoint(pts[i].X + 2, pts[i].Y - 2), new CvPoint(pts[i].X - 2, pts[i].Y + 2), new CvColor(0, 255, 0));
                            res[i] = 2;
                        }
                        else
                        {
                            img.Line(new CvPoint(pts[i].X - 2, pts[i].Y - 2), new CvPoint(pts[i].X + 2, pts[i].Y + 2), new CvColor(0, 0, 255));
                            img.Line(new CvPoint(pts[i].X + 2, pts[i].Y - 2), new CvPoint(pts[i].X - 2, pts[i].Y + 2), new CvColor(0, 0, 255));
                            res[i] = 3;
                        }
                    }
                }

                // (3) Show the training data
                Cv.NamedWindow("SVM", WindowMode.AutoSize);
                Cv.ShowImage("SVM", img);
                Cv.WaitKey(0);

                // (4) Build the training parameters
                float[] data = new float[S * 2];
                for (int i = 0; i < S; i++)
                {
                    data[i * 2] = ((float)pts[i].X) / SIZE;
                    data[i * 2 + 1] = ((float)pts[i].Y) / SIZE;
                }

                // (5) Train the SVM
                using (CvSVM svm = new CvSVM())
                {
                    CvMat data_mat = new CvMat(S, 2, MatrixType.F32C1, data);
                    CvMat res_mat = new CvMat(S, 1, MatrixType.S32C1, res);
                    CvTermCriteria criteria = new CvTermCriteria(1000, float.Epsilon);
                    CvSVMParams param = new CvSVMParams(SVMType.CSvc, SVMKernelType.Rbf, 10.0, 8.0, 1.0, 10.0, 0.5, 0.1, null, criteria);
                    svm.Train(data_mat, res_mat, null, null, param);

                    // (6) Draw the classification result
                    for (int i = 0; i < SIZE; i++)
                    {
                        for (int j = 0; j < SIZE; j++)
                        {
                            float[] a = { (float)j / SIZE, (float)i / SIZE };
                            CvMat m = new CvMat(1, 2, MatrixType.F32C1, a);
                            float ret = svm.Predict(m);
                            CvColor color = new CvColor();
                            switch ((int)ret)
                            {
                                case 1:
                                    color = new CvColor(100, 0, 0); break;
                                case 2:
                                    color = new CvColor(0, 100, 0); break;
                                case 3:
                                    color = new CvColor(0, 0, 100); break;
                            }
                            img[i, j] = color;
                        }
                    }

                    // (7) Redraw the training data
                    for (int i = 0; i < S; i++)
                    {
                        CvColor color = new CvColor();
                        switch (res[i])
                        {
                            case 1:
                                color = new CvColor(255, 0, 0); break;
                            case 2:
                                color = new CvColor(0, 255, 0); break;
                            case 3:
                                color = new CvColor(0, 0, 255); break;
                        }
                        img.Line(new CvPoint(pts[i].X - 2, pts[i].Y - 2), new CvPoint(pts[i].X + 2, pts[i].Y + 2), color);
                        img.Line(new CvPoint(pts[i].X + 2, pts[i].Y - 2), new CvPoint(pts[i].X - 2, pts[i].Y + 2), color);
                    }

                    // (8) Draw the support vectors
                    int sv_num = svm.GetSupportVectorCount();
                    for (int i = 0; i < sv_num; i++)
                    {
                        var support = svm.GetSupportVector(i);
                        img.Circle(new CvPoint((int)(support[0] * SIZE), (int)(support[1] * SIZE)), 5, new CvColor(200, 200, 200));
                    }

                    // (9) Show the image
                    Cv.NamedWindow("SVM", WindowMode.AutoSize);
                    Cv.ShowImage("SVM", img);
                    Cv.WaitKey(0);
                    Cv.DestroyWindow("SVM");

                }
            }

        }
Example #11
        public Inpaint()
        {
            // cvInpaint
            // Remove unwanted text from the image by specifying a mask over the text region

            Console.WriteLine(
                "Hot keys: \n" +
                "\tESC - quit the program\n" +
                "\tr - restore the original image\n" +
                "\ti or ENTER - run inpainting algorithm\n" +
                "\t\t(before running it, paint something on the image)\n" +
                "\ts - save the original image, mask image, original+mask image and inpainted image to desktop."
            );

            // Load the original image
            using (IplImage img0 = new IplImage(Const.ImageFruits, LoadMode.AnyDepth | LoadMode.AnyColor))
            {
                // Allocate the image to draw on, and the mask
                using (IplImage img = img0.Clone())
                using (IplImage inpaintMask = new IplImage(img0.Size, BitDepth.U8, 1))
                // Allocate the destination image for Inpaint
                using (IplImage inpainted = img0.Clone())
                {
                    inpainted.Zero();
                    inpaintMask.Zero();

                    using (CvWindow wImage = new CvWindow("image", WindowMode.AutoSize, img))
                    {

                        // Handle mouse events
                        CvPoint prevPt = new CvPoint(-1, -1);
                        wImage.OnMouseCallback += delegate(MouseEvent ev, int x, int y, MouseEvent flags)
                        {
                            if (ev == MouseEvent.LButtonUp || (flags & MouseEvent.FlagLButton) == 0)
                            {
                                prevPt = new CvPoint(-1, -1);
                            }
                            else if (ev == MouseEvent.LButtonDown)
                            {
                                prevPt = new CvPoint(x, y);
                            }
                            else if (ev == MouseEvent.MouseMove && (flags & MouseEvent.FlagLButton) != 0)
                            {
                                CvPoint pt = new CvPoint(x, y);
                                if (prevPt.X < 0)
                                {
                                    prevPt = pt;
                                }
                                inpaintMask.Line(prevPt, pt, CvColor.White, 5, LineType.AntiAlias, 0);
                                img.Line(prevPt, pt, CvColor.White, 5, LineType.AntiAlias, 0);
                                prevPt = pt;
                                wImage.ShowImage(img);
                            }
                        };

                        for (; ; )
                        {
                            switch ((char)CvWindow.WaitKey(0))
                            {
                                case (char)27:    // exit on ESC
                                    CvWindow.DestroyAllWindows();
                                    return;
                                case 'r':   // restore the original image
                                    inpaintMask.Zero();
                                    img0.Copy(img);
                                    wImage.ShowImage(img);
                                    break;
                                case 'i':   // run Inpaint
                                case '\r':
                                    CvWindow wInpaint = new CvWindow("inpainted image", WindowMode.AutoSize);
                                    img.Inpaint(inpaintMask, inpainted, 3, InpaintMethod.Telea);
                                    wInpaint.ShowImage(inpainted);
                                    break;
                                case 's': // save the images
                                    string desktop = Environment.GetFolderPath(Environment.SpecialFolder.Desktop);
                                    img0.SaveImage(Path.Combine(desktop, "original.png"));
                                    inpaintMask.SaveImage(Path.Combine(desktop, "mask.png"));
                                    img.SaveImage(Path.Combine(desktop, "original+mask.png"));
                                    inpainted.SaveImage(Path.Combine(desktop, "inpainted.png"));
                                    break;
                            }
                        }

                    }

                }
            }

        }
Example #12
        /// <summary>
        /// Keeps only the region with the largest area by blob labeling
        /// </summary>
        /// <param name="imgSrc"></param>
        /// <param name="imgRender"></param>
        private void FilterByMaximalBlob(IplImage imgSrc, IplImage imgDst)
        {
            CvBlobs blobs = new CvBlobs();

            imgDst.Zero();
            blobs.Label(imgSrc);
            CvBlob max = blobs.GreaterBlob();
            if (max == null)
                return;
            blobs.FilterByArea(max.Area, max.Area);
            blobs.FilterLabels(imgDst);
        }
Example #13
        public FindContours()
        {
            // cvFindContours, cvDrawContours
            // Detect contours in the image and draw those at levels from -1 to +1

            const int SIZE = 500;

            using (IplImage img = new IplImage(SIZE, SIZE, BitDepth.U8, 1))
            {
                // Initialize the image
                img.Zero();
                for (int i = 0; i < 6; i++)
                {
                    int dx = (i % 2) * 250 - 30;
                    int dy = (i / 2) * 150;
                    if (i == 0)
                    {
                        for (int j = 0; j <= 10; j++)
                        {
                            double angle = (j + 5) * Cv.PI / 21;
                            CvPoint p1 = new CvPoint(Cv.Round(dx + 100 + j * 10 - 80 * Math.Cos(angle)), Cv.Round(dy + 100 - 90 * Math.Sin(angle)));
                            CvPoint p2 = new CvPoint(Cv.Round(dx + 100 + j * 10 - 30 * Math.Cos(angle)), Cv.Round(dy + 100 - 30 * Math.Sin(angle)));
                            Cv.Line(img, p1, p2, CvColor.White, 1, LineType.AntiAlias, 0);
                        }
                    }
                    Cv.Ellipse(img, new CvPoint(dx + 150, dy + 100), new CvSize(100, 70), 0, 0, 360, CvColor.White, -1, LineType.AntiAlias, 0);
                    Cv.Ellipse(img, new CvPoint(dx + 115, dy + 70), new CvSize(30, 20), 0, 0, 360, CvColor.Black, -1, LineType.AntiAlias, 0);
                    Cv.Ellipse(img, new CvPoint(dx + 185, dy + 70), new CvSize(30, 20), 0, 0, 360, CvColor.Black, -1, LineType.AntiAlias, 0);
                    Cv.Ellipse(img, new CvPoint(dx + 115, dy + 70), new CvSize(15, 15), 0, 0, 360, CvColor.White, -1, LineType.AntiAlias, 0);
                    Cv.Ellipse(img, new CvPoint(dx + 185, dy + 70), new CvSize(15, 15), 0, 0, 360, CvColor.White, -1, LineType.AntiAlias, 0);
                    Cv.Ellipse(img, new CvPoint(dx + 115, dy + 70), new CvSize(5, 5), 0, 0, 360, CvColor.Black, -1, LineType.AntiAlias, 0);
                    Cv.Ellipse(img, new CvPoint(dx + 185, dy + 70), new CvSize(5, 5), 0, 0, 360, CvColor.Black, -1, LineType.AntiAlias, 0);
                    Cv.Ellipse(img, new CvPoint(dx + 150, dy + 100), new CvSize(10, 5), 0, 0, 360, CvColor.Black, -1, LineType.AntiAlias, 0);
                    Cv.Ellipse(img, new CvPoint(dx + 150, dy + 150), new CvSize(40, 10), 0, 0, 360, CvColor.Black, -1, LineType.AntiAlias, 0);
                    Cv.Ellipse(img, new CvPoint(dx + 27, dy + 100), new CvSize(20, 35), 0, 0, 360, CvColor.White, -1, LineType.AntiAlias, 0);
                    Cv.Ellipse(img, new CvPoint(dx + 273, dy + 100), new CvSize(20, 35), 0, 0, 360, CvColor.White, -1, LineType.AntiAlias, 0);
                }

                // Detect the contours
                CvSeq<CvPoint> contours;
                CvMemStorage storage = new CvMemStorage();
                // native style
                Cv.FindContours(img, storage, out contours, CvContour.SizeOf, ContourRetrieval.Tree, ContourChain.ApproxSimple);
                contours = Cv.ApproxPoly(contours, CvContour.SizeOf, storage, ApproxPolyMethod.DP, 3, true);
                
                // wrapper style
                //img.FindContours(storage, out contours, ContourRetrieval.Tree, ContourChain.ApproxSimple);
                //contours = contours.ApproxPoly(storage, ApproxPolyMethod.DP, 3, true);

                // Show in windows
                using (CvWindow window_image = new CvWindow("image", img))
                using (CvWindow window_contours = new CvWindow("contours"))
                {
                    CvTrackbarCallback onTrackbar = delegate(int pos)
                    {
                        IplImage cnt_img = new IplImage(SIZE, SIZE, BitDepth.U8, 3);
                        CvSeq<CvPoint> _contours = contours;
                        int levels = pos - 3;
                        if (levels <= 0) // get to the nearest face to make it look more funny
                        {
                            //_contours = _contours.HNext.HNext.HNext;
                        }
                        cnt_img.Zero();
                        Cv.DrawContours(cnt_img, _contours, CvColor.Red, CvColor.Green, levels, 3, LineType.AntiAlias);
                        window_contours.ShowImage(cnt_img);
                        cnt_img.Dispose();
                    };
                    window_contours.CreateTrackbar("levels+3", 3, 7, onTrackbar);
                    onTrackbar(3);

                    Cv.WaitKey();
                }
            }

        }
Example #14
        /// <summary>
        /// Classical Multidimensional Scaling
        /// </summary>
        public MDS()
        {
            // creates distance matrix
            int size = CityDistance.GetLength(0);
            CvMat t = new CvMat(size, size, MatrixType.F64C1, CityDistance);
            // adds Torgerson's additive constant to t
            t += Torgerson(t);
            // squares all elements of t
            t.Mul(t, t);

            // centering matrix G
            CvMat g = CenteringMatrix(size);
            // calculates inner product matrix B
            CvMat b = g * t * g.T() * -0.5;
            // calculates eigenvalues and eigenvectors of B
            CvMat vectors = new CvMat(size, size, MatrixType.F64C1);
            CvMat values = new CvMat(size, 1, MatrixType.F64C1);
            Cv.EigenVV(b, vectors, values);
            
            for (int r = 0; r < values.Rows; r++)
            {
                if (values[r] < 0)                
                    values[r] = 0;                
            }

            // multiplies sqrt(eigenvalue) by eigenvector
            CvMat result = vectors.GetRows(0, 2);
            for (int r = 0; r < result.Rows; r++)
            {
                for (int c = 0; c < result.Cols; c++)
                {
                    result[r, c] *= Math.Sqrt(values[r]);
                }                
            }

            // scaling
            Cv.Normalize(result, result, 0, 800, NormType.MinMax);

            //Console.WriteLine(vectors);
            //Console.WriteLine(values);
            //Console.WriteLine(result);

            // opens a window
            using (IplImage img = new IplImage(800, 600, BitDepth.U8, 3))
            using (CvFont font = new CvFont(FontFace.HersheySimplex, 0.5f, 0.5f))
            using (CvWindow window = new CvWindow("City Location Estimation"))
            {
                img.Zero();
                for (int c = 0; c < size; c++)
                {
                    double x = result[0, c];
                    double y = result[1, c];
                    x = x * 0.7 + img.Width * 0.1;
                    y = y * 0.7 + img.Height * 0.1;
                    img.Circle((int)x, (int)y, 5, CvColor.Red, -1);
                    img.PutText(CityNames[c], new CvPoint((int)x+5, (int)y+10), font, CvColor.White);
                }
                window.Image = img;
                Cv.WaitKey();
            }
        }
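The MDS example depends on several members that are not shown: CityDistance (assumed to be a symmetric double[,] of pairwise city distances), CityNames, a Torgerson helper returning the additive constant applied to the dissimilarities, and CenteringMatrix. Below is a minimal sketch of CenteringMatrix under the standard classical-MDS definition G = I - (1/n) * J, where J is the n x n all-ones matrix; it is an illustration, not the original code.

        private static CvMat CenteringMatrix(int n)
        {
            // G = I - (1/n) * J; pre- and post-multiplying by G double-centers a matrix.
            CvMat g = new CvMat(n, n, MatrixType.F64C1);
            for (int r = 0; r < n; r++)
            {
                for (int c = 0; c < n; c++)
                {
                    g[r, c] = (r == c ? 1.0 : 0.0) - 1.0 / n;
                }
            }
            return g;
        }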