public static float[] ResizeIplTo(IplImage Face, int width, int height)
        {
            IplImage smallerFace =
                new IplImage(new OpenCvSharp.CvSize(width, height),
                                         Face.Depth, Face.NChannels);

            Face.Resize(smallerFace, Interpolation.Linear);

            unsafe
            {
                byte* smallFaceData = smallerFace.ImageDataPtr;
                int channels = smallerFace.NChannels;
                // pack the pixels into a dense float array
                // (assumes 8-bit depth, since the data is read byte by byte)
                float[] currentFace = new float[width * height * channels];
                for (int i = 0; i < smallerFace.Height; i++)
                {
                    for (int j = 0; j < smallerFace.Width * channels; j++)
                    {
                        // rows may be padded, so index the source by WidthStep
                        // while writing the output densely
                        currentFace[i * width * channels + j] =
                            (float)smallFaceData[i * smallerFace.WidthStep + j];
                    }
                }

                smallerFace.Dispose();

                return currentFace;
            }
        }
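A quick way to exercise the helper; the file name below is hypothetical, and the image must be 8-bit since the copy loop reads bytes:

        // Hypothetical usage: load an 8-bit grayscale face crop and get a
        // dense 64x64 float buffer, e.g. as input features for a classifier.
        IplImage face = new IplImage("face.png", LoadMode.GrayScale);
        float[] features = ResizeIplTo(face, 64, 64);
        face.Dispose();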
Example #2
        public FindContours()
        {
            // cvFindContours, cvDrawContours:
            // detect contours in the image and draw those at levels from -1 to +1

            const int SIZE = 500;

            using (IplImage img = new IplImage(SIZE, SIZE, BitDepth.U8, 1))
            {
                // initialize the image
                img.Zero();
                for (int i = 0; i < 6; i++)
                {
                    int dx = (i % 2) * 250 - 30;
                    int dy = (i / 2) * 150;
                    if (i == 0)
                    {
                        for (int j = 0; j <= 10; j++)
                        {
                            double angle = (j + 5) * Cv.PI / 21;
                            CvPoint p1 = new CvPoint(Cv.Round(dx + 100 + j * 10 - 80 * Math.Cos(angle)), Cv.Round(dy + 100 - 90 * Math.Sin(angle)));
                            CvPoint p2 = new CvPoint(Cv.Round(dx + 100 + j * 10 - 30 * Math.Cos(angle)), Cv.Round(dy + 100 - 30 * Math.Sin(angle)));
                            Cv.Line(img, p1, p2, CvColor.White, 1, LineType.AntiAlias, 0);
                        }
                    }
                    Cv.Ellipse(img, new CvPoint(dx + 150, dy + 100), new CvSize(100, 70), 0, 0, 360, CvColor.White, -1, LineType.AntiAlias, 0);
                    Cv.Ellipse(img, new CvPoint(dx + 115, dy + 70), new CvSize(30, 20), 0, 0, 360, CvColor.Black, -1, LineType.AntiAlias, 0);
                    Cv.Ellipse(img, new CvPoint(dx + 185, dy + 70), new CvSize(30, 20), 0, 0, 360, CvColor.Black, -1, LineType.AntiAlias, 0);
                    Cv.Ellipse(img, new CvPoint(dx + 115, dy + 70), new CvSize(15, 15), 0, 0, 360, CvColor.White, -1, LineType.AntiAlias, 0);
                    Cv.Ellipse(img, new CvPoint(dx + 185, dy + 70), new CvSize(15, 15), 0, 0, 360, CvColor.White, -1, LineType.AntiAlias, 0);
                    Cv.Ellipse(img, new CvPoint(dx + 115, dy + 70), new CvSize(5, 5), 0, 0, 360, CvColor.Black, -1, LineType.AntiAlias, 0);
                    Cv.Ellipse(img, new CvPoint(dx + 185, dy + 70), new CvSize(5, 5), 0, 0, 360, CvColor.Black, -1, LineType.AntiAlias, 0);
                    Cv.Ellipse(img, new CvPoint(dx + 150, dy + 100), new CvSize(10, 5), 0, 0, 360, CvColor.Black, -1, LineType.AntiAlias, 0);
                    Cv.Ellipse(img, new CvPoint(dx + 150, dy + 150), new CvSize(40, 10), 0, 0, 360, CvColor.Black, -1, LineType.AntiAlias, 0);
                    Cv.Ellipse(img, new CvPoint(dx + 27, dy + 100), new CvSize(20, 35), 0, 0, 360, CvColor.White, -1, LineType.AntiAlias, 0);
                    Cv.Ellipse(img, new CvPoint(dx + 273, dy + 100), new CvSize(20, 35), 0, 0, 360, CvColor.White, -1, LineType.AntiAlias, 0);
                }

                // contour detection
                CvSeq<CvPoint> contours;
                CvMemStorage storage = new CvMemStorage();
                // native style
                Cv.FindContours(img, storage, out contours, CvContour.SizeOf, ContourRetrieval.Tree, ContourChain.ApproxSimple);
                contours = Cv.ApproxPoly(contours, CvContour.SizeOf, storage, ApproxPolyMethod.DP, 3, true);
                
                // wrapper style
                //img.FindContours(storage, out contours, ContourRetrieval.Tree, ContourChain.ApproxSimple);
                //contours = contours.ApproxPoly(storage, ApproxPolyMethod.DP, 3, true);

                // show the results in windows
                using (CvWindow window_image = new CvWindow("image", img))
                using (CvWindow window_contours = new CvWindow("contours"))
                {
                    CvTrackbarCallback onTrackbar = delegate(int pos)
                    {
                        IplImage cnt_img = new IplImage(SIZE, SIZE, BitDepth.U8, 3);
                        CvSeq<CvPoint> _contours = contours;
                        int levels = pos - 3;
                        if (levels <= 0) // get to the nearest face to make it look more funny
                        {
                            //_contours = _contours.HNext.HNext.HNext;
                        }
                        cnt_img.Zero();
                        Cv.DrawContours(cnt_img, _contours, CvColor.Red, CvColor.Green, levels, 3, LineType.AntiAlias);
                        window_contours.ShowImage(cnt_img);
                        cnt_img.Dispose();
                    };
                    window_contours.CreateTrackbar("levels+3", 3, 7, onTrackbar);
                    onTrackbar(3);

                    Cv.WaitKey();
                }

                storage.Dispose();
            }

        }
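The demo runs entirely in its constructor, so a minimal entry point (assumed here, not part of the sample) is a single line:

        // Hypothetical entry point: constructing FindContours draws the
        // synthetic faces, extracts the contour tree and opens the windows.
        static void Main()
        {
            new FindContours();
        }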
Example #3
        /// <summary>
        /// Returns the squares detected in the image;
        /// intermediate contour data is stored in the specified memory storage.
        /// </summary>
        /// <param name="img">input image</param>
        /// <param name="storage">memory storage for the intermediate contour sequences</param>
        /// <returns>vertices of the detected squares, four points per square</returns>
        static CvPoint[] FindSquares4(IplImage img, CvMemStorage storage)
        {
            const int N = 11;

            CvSize sz = new CvSize(img.Width & -2, img.Height & -2);
            IplImage timg = img.Clone(); // make a copy of input image
            IplImage gray = new IplImage(sz, BitDepth.U8, 1);
            IplImage pyr = new IplImage(sz.Width / 2, sz.Height / 2, BitDepth.U8, 3);
            // create empty sequence that will contain points -
            // 4 points per square (the square's vertices)
            CvSeq<CvPoint> squares = new CvSeq<CvPoint>(SeqType.Zero, CvSeq.SizeOf, storage);

            // select the maximum ROI in the image
            // with the width and height divisible by 2
            timg.ROI = new CvRect(0, 0, sz.Width, sz.Height);

            // down-scale and up-scale the image to filter out the noise
            Cv.PyrDown(timg, pyr, CvFilter.Gaussian5x5);
            Cv.PyrUp(pyr, timg, CvFilter.Gaussian5x5);
            IplImage tgray = new IplImage(sz, BitDepth.U8, 1);

            // find squares in every color plane of the image
            for (int c = 0; c < 3; c++)
            {
                // extract the c-th color plane
                timg.COI = c + 1;
                Cv.Copy(timg, tgray, null);

                // try several threshold levels
                for (int l = 0; l < N; l++)
                {
                    // hack: use Canny instead of zero threshold level.
                    // Canny helps to catch squares with gradient shading   
                    if (l == 0)
                    {
                        // apply Canny. Take the upper threshold from slider
                        // and set the lower to 0 (which forces edges merging) 
                        Cv.Canny(tgray, gray, 0, Thresh, ApertureSize.Size5);
                        // dilate canny output to remove potential
                        // holes between edge segments 
                        Cv.Dilate(gray, gray, null, 1);
                    }
                    else
                    {
                        // apply threshold if l!=0:
                        //     tgray(x,y) = gray(x,y) < (l+1)*255/N ? 255 : 0
                        Cv.Threshold(tgray, gray, (l + 1) * 255.0 / N, 255, ThresholdType.Binary);
                    }

                    // find contours and store them all as a list
                    CvSeq<CvPoint> contours;
                    Cv.FindContours(gray, storage, out contours, CvContour.SizeOf, ContourRetrieval.List, ContourChain.ApproxSimple, new CvPoint(0, 0));

                    // test each contour
                    while (contours != null)
                    {
                        // approximate contour with accuracy proportional
                        // to the contour perimeter
                        CvSeq<CvPoint> result = Cv.ApproxPoly(contours, CvContour.SizeOf, storage, ApproxPolyMethod.DP, contours.ContourPerimeter() * 0.02, false);
                        // square contours should have 4 vertices after approximation
                        // relatively large area (to filter out noisy contours)
                        // and be convex.
                        // Note: absolute value of an area is used because
                        // area may be positive or negative - in accordance with the
                        // contour orientation
                        if (result.Total == 4 && Math.Abs(result.ContourArea(CvSlice.WholeSeq)) > 1000 && result.CheckContourConvexity())
                        {
                            double s = 0;

                            for (int i = 2; i < 5; i++)
                            {
                                // find the maximum cosine of the angle between
                                // joint edges (i % 4 wraps around the quadrangle)
                                double t = Math.Abs(Angle(result[i % 4].Value, result[i - 2].Value, result[i - 1].Value));
                                s = Math.Max(s, t);
                            }

                            // if the cosines of all angles are small
                            // (all angles are ~90 degrees) then write the quadrangle
                            // vertices to the resultant sequence
                            if (s < 0.3)
                            {
                                for (int i = 0; i < 4; i++)
                                {
                                    //Console.WriteLine(result[i]);
                                    squares.Push(result[i].Value);
                                }
                            }
                        }

                        // take the next contour
                        contours = contours.HNext;
                    }
                }
            }

            // release all the temporary images
            gray.Dispose();
            pyr.Dispose();
            tgray.Dispose();
            timg.Dispose();

            return squares.ToArray();
        }
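FindSquares4 references a Thresh field and an Angle helper that are not shown in this snippet. In the classic OpenCV squares demo this code is ported from, they look roughly as follows (Thresh = 50 is the demo's default, an assumption here):

        // Canny upper threshold used by FindSquares4 (50 in the classic demo)
        const int Thresh = 50;

        // Cosine of the angle between vectors (pt0->pt1) and (pt0->pt2);
        // a value near 0 means the corner at pt0 is close to 90 degrees
        static double Angle(CvPoint pt1, CvPoint pt2, CvPoint pt0)
        {
            double dx1 = pt1.X - pt0.X;
            double dy1 = pt1.Y - pt0.Y;
            double dx2 = pt2.X - pt0.X;
            double dy2 = pt2.Y - pt0.Y;
            return (dx1 * dx2 + dy1 * dy2) /
                   Math.Sqrt((dx1 * dx1 + dy1 * dy1) * (dx2 * dx2 + dy2 * dy2) + 1e-10);
        }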
Example #4
        static void Main(string[] args)
        {
            // the argument to CreateCameraCapture is the camera index (usually starting from 0)
            using (var capture = Cv.CreateCameraCapture(0))
            {
                Console.WriteLine("Hit any key to quit");

                double fps;
                int interval;
                double zoom = 1.0;
                string OutputFile;

                var opts = new Options();
                bool isSuccess = CommandLine.Parser.Default.ParseArguments(args, opts);

                if(!isSuccess)
                {
                    Console.WriteLine(opts.GetUsage());
                    Console.WriteLine(Environment.GetCommandLineArgs()[0] + "  -o Outputfilename(string) -f fps(double) -i CaptureInterval(int)");
                    Environment.Exit(0);
                }

                fps = opts.fps;
                interval = opts.interval;
                zoom = opts.zoom;
                OutputFile = opts.OutputFile;
                Console.WriteLine(OutputFile);
                if (fps > 30 || interval < 1)
                {
                    Console.WriteLine(" :-p");
                    Environment.Exit(1);
                }

                int codec = 0; // codec fourcc (0 = default for AVI)
                IplImage frame;

                /*
                double width = capture.FrameWidth/2;
                double height = capture.FrameHeight/2;

                //double width = 640, height = 240;
                Cv.SetCaptureProperty(capture, CaptureProperty.FrameWidth, width);
                Cv.SetCaptureProperty(capture, CaptureProperty.FrameHeight, height);
                CvSize size = new CvSize((int)width, (int)height);
                CvVideoWriter vw = new CvVideoWriter(OutputFile, codec, fps, size, true);
                */

                int width = (int)(Cv.GetCaptureProperty(capture, CaptureProperty.FrameWidth)*zoom);
                int height = (int)(Cv.GetCaptureProperty(capture, CaptureProperty.FrameHeight)*zoom);

                //Cv.SetCaptureProperty(capture, CaptureProperty.FrameWidth, width);
                //Cv.SetCaptureProperty(capture, CaptureProperty.FrameWidth, height);
                //Bitmap bitmap = new Bitmap(width, height);

                CvSize size = new CvSize(width, height);
                CvVideoWriter vw = new CvVideoWriter(OutputFile, codec, fps, size, true);

                // pick a font size appropriate for the frame width
                double fontSize = (width > 600) ? 1.0 : 0.5;

                CvFont font = new CvFont(FontFace.HersheyPlain, fontSize, fontSize);

                // keep displaying the webcam image until any key is pressed
                while (Cv.WaitKey(1) == -1)
                {
                    System.Threading.Thread.Sleep(1000 * interval);
                    // grab a frame from the camera
                    frame = Cv.QueryFrame(capture);
                    string str = DateTime.Now.ToString();

                    // show the timestamped webcam image in the "Timelapse" window
                    if (frame != null)
                    {
                        frame.PutText(str, new CvPoint(10, 20), font, new CvColor(200, 100, 50));
                        Cv.ShowImage("Timelapse", frame);
                        //frame.SaveImage("result.bmp");
                        vw.WriteFrame(frame);
                        // the frame returned by QueryFrame is owned by the capture,
                        // so it is not disposed here
                    }
                }

                Cv.DestroyWindow("Capture");
                vw.Dispose();
            }
        }
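Main depends on an Options class for command-line parsing. Below is a minimal sketch against the CommandLineParser 1.9.x attribute API; the option names and defaults are assumptions inferred from how Main reads the parsed values:

        class Options
        {
            [Option('o', "output", Required = true, HelpText = "Output AVI file name")]
            public string OutputFile { get; set; }

            [Option('f', "fps", DefaultValue = 12.0, HelpText = "Frame rate of the output video")]
            public double fps { get; set; }

            [Option('i', "interval", DefaultValue = 1, HelpText = "Capture interval in seconds")]
            public int interval { get; set; }

            [Option('z', "zoom", DefaultValue = 1.0, HelpText = "Scale factor for the frame size")]
            public double zoom { get; set; }

            [HelpOption]
            public string GetUsage()
            {
                // hypothetical usage string; a real implementation might use HelpText.AutoBuild
                return "usage: -o <file> -f <fps> -i <interval> [-z <zoom>]";
            }
        }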
Example #5
        /// <summary>
        /// Performs the recognition.
        /// </summary>
        /// <param name="imagePath">path of the image to recognize</param>
        /// <param name="isDebug">debug mode</param>
        /// <returns>recognition results, one line per detected symbol</returns>
        public static String Recognize(String imagePath, bool isDebug = false)
        {
            List<String> results = new List<string>();

            // load the image to recognize
            IplImage src = new IplImage(imagePath, LoadMode.GrayScale);

            using (IplImage tmpImage = new IplImage(src.Size, BitDepth.U8, 1))
            {
                // 1) preprocessing before detection

                // edge enhancement (unsharp masking)
                src.UnsharpMasking(src, 3);

                // binarization with Otsu's method
                // Otsu, "An Automatic Threshold Selection Method Based on Discriminant
                // and Least Squares Criteria", Trans. IECE of Japan, Vol.J63-D, No.4,
                // pp.349-356, 1980.
                src.Threshold(tmpImage, 200, 250, ThresholdType.Otsu);

                src.Dispose();

                Dictionary<int, List<double>> shapeMatchResults = new Dictionary<int, List<double>>();

                List<string> answerFileNames = washTagDictionary.Keys.ToList();
                foreach (var answerFileName in answerFileNames)
                {
                    var washTagInfo = washTagDictionary[answerFileName];
                    var answerImagePath = String.Format(@"answer\{0}.png", answerFileName);

                    // 2) detection
                    var resultSURF = SURF(tmpImage, answerImagePath, isDebug);

                    // 3) evaluate the detection candidates
                    string result = null;

                    // Case 1: corner points were found
                    if (resultSURF.dstCorners != null)
                    {
                        // TODO: planarity evaluation
                        //result = fileBaseName + " : " + washTagDictionary[fileBaseName];
                    }

                    // Case 2: shape matching
                    if (result == null && resultSURF.findPointList.Count > 0)
                    {
                        // ROI side length: about a quarter of the image width,
                        // so roughly four symbols fit across (an informal choice)
                        CvSize roiSize = new CvSize(tmpImage.Width / 4, tmpImage.Width / 4);

                        List<double> matchResults = new List<double>();
                        foreach (var findPoint in resultSURF.findPointList)
                        {
                            // set the ROI
                            tmpImage.SetROI(
                                (int)findPoint.Pt.X - roiSize.Width / 2,
                                (int)findPoint.Pt.Y - roiSize.Height / 2,
                                roiSize.Width, roiSize.Height
                            );
                            // shape matching with Hu moments (robust to rotation, scaling and reflection)
                            matchResults.Add(
                                CompareShapeMoment(tmpImage, answerImagePath, MatchShapesMethod.I1)
                            );
                            // reset the ROI
                            tmpImage.ResetROI();
                        }

                        // count as detected when the best score is below the threshold
                        if (matchResults.Min() < 0.005)
                        {
                            // create the list for this category if it does not exist yet
                            if (shapeMatchResults.ContainsKey(washTagInfo.CategoryNo) == false)
                            {
                                shapeMatchResults.Add(washTagInfo.CategoryNo, new List<double>());
                            }

                            shapeMatchResults[washTagInfo.CategoryNo].Add(matchResults.Min());
                        }
                    }
                }

                // 4) organize the recognition results
                foreach (var categoryNo in shapeMatchResults.Keys)
                {
                    var matchResult = shapeMatchResults[categoryNo];

                    var min = matchResult.Min();
                    var index = matchResult.IndexOf(min);

                    var id = String.Format("{0:0}{1:00}", categoryNo, index + 1);
                    var recognitionWashTag = washTagDictionary[id];

                    // store the result
                    results.Add(
                        String.Format(isDebug ? "{0} : {1} ({2})" : "{0} : {1}", id, recognitionWashTag.Description, min)
                    );

                }

                // debug display
                if (isDebug)
                {
                    using (CvWindow win = new CvWindow("image", tmpImage))
                    {
                        CvWindow.WaitKey();
                    }
                }
            }

            return results.Count > 0
                ? String.Join("\n", results.ToArray())
                : "Nothing could be detected.";
        }
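Recognize also relies on washTagDictionary, SURF and CompareShapeMoment, which are defined elsewhere in the project. Only the last is small enough to sketch here; assuming it wraps cvMatchShapes (Hu-moment comparison, where smaller values mean a closer match), it could look like this:

        // Sketch of CompareShapeMoment, assuming a thin wrapper over
        // Cv.MatchShapes; loading the answer image as grayscale is an assumption.
        static double CompareShapeMoment(IplImage target, string answerImagePath, MatchShapesMethod method)
        {
            using (IplImage answer = new IplImage(answerImagePath, LoadMode.GrayScale))
            {
                return Cv.MatchShapes(target, answer, method);
            }
        }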
Example #6
 public static IplImage GetTrainingExample(System.Drawing.Size size, string fileName)
 {
     IplImage src = new IplImage(fileName);
     // allocate the destination at the requested training-example size
     IplImage dst = new IplImage(new CvSize(size.Width, size.Height), src.Depth, src.NChannels);
     Cv.Resize(src, dst, Interpolation.Linear);
     src.Dispose();
     return dst;
 }
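For instance (hypothetical file name and size):

 // Normalize a sample image to 64x64 before training
 IplImage sample = GetTrainingExample(new System.Drawing.Size(64, 64), "face_001.png");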