Example #1
        //Load the image file
        private bool LoadImageFile(String file_name)
        {
            //Load the trained features of the cascade classifier
            CvHaarClassifierCascade cascade = CvHaarClassifierCascade.FromFile(@"C:\opencv2.4.10\sources\data\haarcascades\haarcascade_frontalface_alt.xml");
            CvMemStorage strage = new CvMemStorage(0);   // Allocate memory storage
            this.ImageFileName = file_name;

            using (IplImage img = new IplImage(this.ImageFileName))
            {
                    //Convert to grayscale
                using( IplImage gray_image = Cv.CreateImage(new CvSize(img.Width,img.Height),BitDepth.U8,1) )
                {
                    Cv.CvtColor(img, gray_image, ColorConversion.BgrToGray);

                    //Rectangles of the detected faces
                    var result = Cv.HaarDetectObjects(gray_image, cascade, strage);
                    for (int i = 0; i < result.Total; i++)
                    {
                        //Draw a rectangle of that size on the image
                        CvRect rect = result[i].Value.Rect;
                        Cv.Rectangle(img, rect, new CvColor(255, 0, 0));

                        //Copy the face region into a new IplImage
                        img.ROI = rect;
                        IplImage ipl_image = img.Clone(img.ROI);
                        Cv.ResetImageROI(img);
/*
                        //Preview the cropped face for checking
                        new CvWindow(ipl_image);
                        Cv.WaitKey();
*/
                        //Record every detected face candidate so all of them can be checked later
                        this.FaceIplList.Add(ipl_image);
                    }
                }

                //Release memory
                cascade.Dispose();
                strage.Dispose();

                return true;
            }
        }
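For reference, below is a minimal, self-contained sketch of the same flow (load the cascade, convert to grayscale, run HaarDetectObjects, clone each detected face rectangle). The Main entry point, the input path, and the output file names are assumptions for illustration; only the OpenCvSharp calls mirror the example above.

using System;
using OpenCvSharp;

class FaceCropSketch
{
    static void Main()
    {
        // Paths are placeholders; point them at your OpenCV install and input image.
        const string cascadePath = @"C:\opencv2.4.10\sources\data\haarcascades\haarcascade_frontalface_alt.xml";
        const string imagePath = @"C:\temp\input.jpg";

        using (CvHaarClassifierCascade cascade = CvHaarClassifierCascade.FromFile(cascadePath))
        using (CvMemStorage storage = new CvMemStorage(0))
        using (IplImage img = new IplImage(imagePath))
        using (IplImage gray = Cv.CreateImage(img.Size, BitDepth.U8, 1))
        {
            Cv.CvtColor(img, gray, ColorConversion.BgrToGray);

            // Detect faces on the grayscale image
            CvSeq<CvAvgComp> faces = Cv.HaarDetectObjects(gray, cascade, storage);
            for (int i = 0; i < faces.Total; i++)
            {
                CvRect rect = faces[i].Value.Rect;

                // Clone only the face region and save it to disk
                using (IplImage face = img.Clone(rect))
                {
                    Cv.SaveImage(string.Format(@"C:\temp\face_{0}.png", i), face);
                }
            }
        }
    }
}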
Example #2
        public Bitmap Detect2()
        {
            // Grab a frame and preprocess it with the getHandPicturesGMG helper
            IplImage image = videoCapture.QueryFrame();
            IplImage ret = getHandPicturesGMG(image);

            // Detect hands with the Haar cascade and mark each hit with a red rectangle
            CvMemStorage storage = new CvMemStorage();
            CvSeq<CvAvgComp> hands = Cv.HaarDetectObjects(ret, cascade, storage, 1.139);
            foreach (CvAvgComp obj in hands)
            {
                ret.Rectangle(obj.Rect, CvColor.Red);
            }

            // Convert the result to a Bitmap for display, then release the unmanaged OpenCV objects
            Bitmap bt = ret.ToBitmap();
            ret.Dispose();
            image.Dispose();
            storage.Dispose();
            hands.Dispose();
            return bt;
        }
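A hedged sketch of how Detect2 might be driven from a Windows Forms timer so each frame is processed and displayed. The field names (videoCapture, cascade, timer1, pictureBox1) and the cascade path are assumptions about the surrounding class, not taken from the original.

        // Hypothetical wiring; requires using System.Drawing, System.Windows.Forms and OpenCvSharp.
        // videoCapture, cascade, timer1 and pictureBox1 are assumed fields/controls of the same form.
        private void Form1_Load(object sender, EventArgs e)
        {
            videoCapture = CvCapture.FromCamera(0);
            cascade = CvHaarClassifierCascade.FromFile(@"C:\temp\hand_cascade.xml");   // placeholder cascade path

            timer1.Interval = 100;   // roughly 10 frames per second
            timer1.Tick += (s, ev) =>
            {
                Bitmap previous = pictureBox1.Image as Bitmap;
                pictureBox1.Image = Detect2();   // detect hands on the current frame
                if (previous != null)
                    previous.Dispose();          // avoid leaking the previous frame's Bitmap
            };
            timer1.Start();
        }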
Example #3
        /// <summary>
        /// Computes feature values from a face image.
        /// </summary>
        /// <param name="ipl_image">Face image to analyze</param>
        /// <param name="face_id">ID assigned to the resulting feature values</param>
        public void MakeFeatureFromIpl(IplImage ipl_image, int face_id)
        {
            string eye_cascade_xml = @"C:\opencv2.4.10\sources\data\haarcascades\haarcascade_eye.xml";
            string nose_cascade_xml = @"C:\opencv2.4.10\sources\data\haarcascades\haarcascade_mcs_nose.xml";
            string mouth_cascade_xml = @"C:\opencv2.4.10\sources\data\haarcascades\haarcascade_mcs_mouth.xml";

            CvMemStorage strage = new CvMemStorage(0);   // Allocate memory storage
            CvHaarClassifierCascade eye_cascade = CvHaarClassifierCascade.FromFile(eye_cascade_xml);
            CvHaarClassifierCascade nose_cascade = CvHaarClassifierCascade.FromFile(nose_cascade_xml);
            CvHaarClassifierCascade mouth_cascade = CvHaarClassifierCascade.FromFile(mouth_cascade_xml);

            //Turn the image into data
            {
                IplImage tmp_image;
                //If the image is small, enlarge it before use
                if (ipl_image.Size.Width < SMALL_IMAGE_LIMIT)
                {
                    tmp_image = Cv.CreateImage(new CvSize(ipl_image.Width * IMAGE_RESIZE_RATE, ipl_image.Height * IMAGE_RESIZE_RATE), BitDepth.U8, 3);
                    Cv.Resize(ipl_image, tmp_image);
                }
                else
                {
                    tmp_image = Cv.CreateImage(new CvSize(ipl_image.Width, ipl_image.Height), BitDepth.U8, 3);
                    Cv.Resize(ipl_image, tmp_image);
                }

                //Convert to grayscale
                IplImage gray_image = Cv.CreateImage(new CvSize(tmp_image.Width, tmp_image.Height), BitDepth.U8, 1);
                Cv.CvtColor(tmp_image, gray_image, ColorConversion.BgrToGray);

                //Detected rectangles (eye candidates)
                this.EyeResult = Cv.HaarDetectObjects(gray_image, eye_cascade, strage);

                //Search for the nose only around the vertical middle of the image
                {
                    IplImage gray_nose_image = Cv.CreateImage(new CvSize(tmp_image.Width, tmp_image.Height), BitDepth.U8, 1);
                    Cv.CvtColor(tmp_image, gray_nose_image, ColorConversion.BgrToGray);
                    CvRect rect = new CvRect(0, (int)(tmp_image.Height*0.25), tmp_image.Width, tmp_image.Height / 2);
                    gray_nose_image.ROI = rect;
//                  new CvWindow(gray_nose_image);
//                  Cv.WaitKey();

                    this.NoseResult = Cv.HaarDetectObjects(gray_nose_image, nose_cascade, strage);
                }

                //Search for the mouth only in the lower third of the image
                {
                    IplImage gray_mouth_image = Cv.CreateImage(new CvSize(tmp_image.Width, tmp_image.Height), BitDepth.U8, 1);
                    Cv.CvtColor(tmp_image, gray_mouth_image, ColorConversion.BgrToGray);
                    CvRect rect = new CvRect(0, (int)(tmp_image.Height *0.66), tmp_image.Width, tmp_image.Height / 3);
                    gray_mouth_image.ROI = rect;
//                    new CvWindow(gray_mouth_image);
//                     Cv.WaitKey();
                    this.MouthResult = Cv.HaarDetectObjects(gray_mouth_image, mouth_cascade, strage);
                }
                //Initialization
                DataInit();
                //Debug display
//                DebugPrint(tmp_image, this.ReadCount);

                //Finalize the rectangles for the left eye, right eye, nose, and mouth.
                DecidePartsRect(gray_image);

                //After the parts have been determined
//                DebugPrint2(gray_image, this.ReadCount);

                PartsRectInfo parts_info;
                parts_info.RightEye = this.RightEyeRect;
                parts_info.LeftEye = this.LeftEyeRect;
                parts_info.Nose = this.NoseRect;
                parts_info.Mouth = this.MouthRect;

                //Build the feature values
                FeatureValue feature_value = new FeatureValue();
                bool ret = MakeFeatureValue(gray_image, ref parts_info, out feature_value);

                //Register only valid data
                if (ret)
                {
                    feature_value.ID = face_id;
                    this.FeatuerValueList.Add(feature_value);
                }
            }

            //Release memory
            eye_cascade.Dispose();
            nose_cascade.Dispose();
            mouth_cascade.Dispose();
            strage.Dispose();
            return;
        }
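The nose and mouth searches above rely on restricting detection to a region of interest. Below is a small standalone sketch of that ROI technique; the helper name and the coordinate offsetting back to full-image space are illustrative additions, not part of the original class.

        // Detect objects only in the lower third of a grayscale image (as done for the mouth above).
        private static CvRect[] DetectInLowerThird(IplImage gray, CvHaarClassifierCascade cascade, CvMemStorage storage)
        {
            CvRect roi = new CvRect(0, gray.Height * 2 / 3, gray.Width, gray.Height / 3);
            gray.ROI = roi;
            CvSeq<CvAvgComp> found = Cv.HaarDetectObjects(gray, cascade, storage);
            Cv.ResetImageROI(gray);

            // Rectangles returned while an ROI is set are relative to the ROI origin,
            // so shift them back into full-image coordinates.
            CvRect[] rects = new CvRect[found.Total];
            for (int i = 0; i < found.Total; i++)
            {
                CvRect r = found[i].Value.Rect;
                rects[i] = new CvRect(r.X + roi.X, r.Y + roi.Y, r.Width, r.Height);
            }
            return rects;
        }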
Example #4
        private void timer1_Tick(object sender, EventArgs e)
        {
            // Grab the next frame from the capture device.
            IplImage ipl1 = capture.QueryFrame();
            int l = 0;
            int r = 0;
            int m = 0;
            // Get the height and width of the captured camera image and show them in the labels.
            //labelWidth.Text = capture.FrameWidth.ToString();
            //labelHeight.Text = capture.FrameHeight.ToString();

            if (ipl1 != null)
            {
                fig = true;
                // Show the captured image in the pictureBox.
                pictureBox1.Image = ipl1.ToBitmap();
                // A memory leak reportedly occurs here, so periodically have the GC
                // reclaim dynamically allocated memory that is no longer needed.
                if (GC.GetTotalMemory(false) > 600000)
                {
                    GC.Collect();
                }

                // Image processing should be written from here.
                // Extract edge of circle
                CvMemStorage storage = new CvMemStorage();
                IplImage gray = new IplImage(ipl1.Size, BitDepth.U8, 1);
                IplImage binary = new IplImage(ipl1.Size, BitDepth.U8, 1);
                // Convert the captured image to grayscale
                Cv.CvtColor(ipl1, gray, ColorConversion.BgrToGray);
                // Smooth the grayscale image to reduce false detections.
                //Cv.Smooth(gray, gray, SmoothType.Gaussian, 9, 9, 1, 1);
                // Detect circles in the grayscale image with the Hough transform.
                CvSeq<CvCircleSegment> circl = Cv.HoughCircles(gray, storage, HoughCirclesMethod.Gradient, 2, 200, 100, 100, 20, 200);
                // HoughCircles(input image, memory storage, method, inverse accumulator resolution (dp),
                //              minimum distance between circle centers, Canny threshold, accumulator threshold, min radius, max radius)
                foreach (CvCircleSegment crcl in circl)
                {
                    ipl1.Circle(crcl.Center, (int)crcl.Radius, CvColor.Blue, 3);
                    labelCenter.Text = crcl.Center.ToString();
                    //labelCenter.Text = crcl.Center.ToString();
                    //double Area;
                    Area = crcl.Radius;   // * crcl.Radius * 3.14 (area formula commented out; Area actually holds the radius)
                    labelRadius.Text = Area.ToString();
                    x_pos = crcl.Center.X;
                    //labelArea.Text = Area.ToString();
                }
                //circl.Dispose();
                storage.Dispose();
                pictureBox1.Invalidate();

                // Show the Canny edge image in pictureBox3.
                Cv.Canny(gray, binary, 100, 100);
                pictureBox3.Image = binary.ToBitmap();
                // Show the captured image (with detected circles drawn) in pictureBox2.
                pictureBox2.Image = ipl1.ToBitmap();
                // Release the temporary unmanaged images created for this frame.
                gray.Dispose();
                binary.Dispose();

                // Choose a movement command based on the circle size (Area) and horizontal position (x_pos).
                if (Area < 45)
                {
                    textBox1.Text = br.Forward();
                }
                else if (Area > 80)
                {
                    textBox1.Text = br.Back();
                }
                else
                {
                    if (x_pos < 100 && Area > 0)
                    {
                        textBox1.Text = br.TurnLeft();
                    }
                    else if (x_pos > 220 && Area > 0)
                    {
                        textBox1.Text = br.TurnRight();
                    }
                    else
                    {
                        textBox1.Text = br.Stop();
                    }
                }

            }
            else
            {
                timer1.Stop();
            }
        }
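For clarity, here is a standalone sketch of the same Cv.HoughCircles call with each parameter annotated; the file path, the smoothing step, and the window display are assumptions for illustration, not part of the original form.

using OpenCvSharp;

class HoughCirclesSketch
{
    static void Main()
    {
        using (IplImage src = new IplImage(@"C:\temp\circles.png", LoadMode.Color))   // placeholder path
        using (IplImage gray = new IplImage(src.Size, BitDepth.U8, 1))
        using (CvMemStorage storage = new CvMemStorage())
        {
            Cv.CvtColor(src, gray, ColorConversion.BgrToGray);
            Cv.Smooth(gray, gray, SmoothType.Gaussian, 9);   // smoothing reduces false circle detections

            CvSeq<CvCircleSegment> circles = Cv.HoughCircles(
                gray,                         // single-channel input image
                storage,                      // memory storage for the results
                HoughCirclesMethod.Gradient,  // detection method
                2,                            // dp: inverse ratio of accumulator resolution to image resolution
                200,                          // minDist: minimum distance between detected circle centers
                100,                          // param1: upper Canny edge threshold
                100,                          // param2: accumulator threshold (smaller finds more circles)
                20,                           // minRadius
                200);                         // maxRadius

            foreach (CvCircleSegment c in circles)
            {
                src.Circle(c.Center, (int)c.Radius, CvColor.Blue, 3);
            }

            using (new CvWindow("circles", src))
            {
                Cv.WaitKey();
            }
        }
    }
}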