/// <summary>
/// Draws an IplImage ROI onto a GDI device context obtained from a GDI+ Graphics,
/// annotates the result with GDI+ text/shapes, copies it back into an IplImage,
/// and shows both source and result windows.
/// </summary>
public DrawToHdc()
{
    // Region of Roosevelt's face within the Yalta photo.
    CvRect roi = new CvRect(320, 260, 100, 100);

    using (var src = new IplImage(FilePath.Image.Yalta, LoadMode.Color))
    using (var dst = new IplImage(roi.Size, BitDepth.U8, 3))
    {
        src.ROI = roi;
        using (Bitmap bitmap = new Bitmap(roi.Width, roi.Height, PixelFormat.Format32bppArgb))
        using (Graphics g = Graphics.FromImage(bitmap))
        {
            //BitmapConverter.DrawToGraphics(src, g, new CvRect(new CvPoint(0, 0), roi.Size));

            // ReleaseHdc must run even if DrawToHdc throws, otherwise the
            // Graphics object is left unusable and the DC handle leaks.
            IntPtr hdc = g.GetHdc();
            try
            {
                BitmapConverter.DrawToHdc(src, hdc, new CvRect(new CvPoint(0, 0), roi.Size));
            }
            finally
            {
                g.ReleaseHdc(hdc);
            }

            // Font and Pen wrap GDI handles; dispose them instead of leaking
            // one pair per call (original created them inline, undisposed).
            using (var font = new Font(FontFamily.GenericSerif, 12))
            using (var pen = new Pen(Color.Red, 4))
            {
                g.DrawString("Roosevelt", font, Brushes.Red, 20, 0);
                g.DrawEllipse(pen, new Rectangle(20, 20, roi.Width / 2, roi.Height / 2));
            }

            dst.CopyFrom(bitmap);
        }
        src.ResetROI();

        using (new CvWindow("src", src))
        using (new CvWindow("dst", dst))
        {
            Cv.WaitKey();
        }
    }
}
/// <summary>
/// Bitmap -> IplImage: loads a Bitmap from file, copies its pixels into a
/// same-sized 8-bit 3-channel IplImage, and displays the result.
/// </summary>
private void TestBitmap()
{
    using (Bitmap bitmap = new Bitmap(Const.ImageFruits))
    // IplImage is IDisposable and wraps native memory; the original leaked it.
    using (IplImage ipl = new IplImage(bitmap.Width, bitmap.Height, BitDepth.U8, 3))
    {
        ipl.CopyFrom(bitmap);
        using (new CvWindow("from Bitmap to IplImage", ipl))
        {
            Cv.WaitKey();
        }
    }
}
/// <summary>
/// Renders the lower portion of <paramref name="src"/> (starting 150 px down)
/// to a GDI device context, overlays a Korean greeting with GDI+, and returns
/// the composed result as an IplImage stored in the <c>hdcgrahics</c> field.
/// </summary>
/// <param name="src">Source image; must be at least 150 px tall.</param>
/// <returns>The composed image (same instance as the <c>hdcgrahics</c> field).</returns>
public IplImage DrawToHdc(IplImage src)
{
    // NOTE(review): a previously assigned hdcgrahics is overwritten without
    // Dispose here — confirm whether callers still hold the old instance.
    hdcgrahics = new IplImage(src.Size, BitDepth.U8, 3);

    // Bitmap, Graphics, and Font wrap GDI handles — the original leaked all
    // three on every call; dispose them once the pixels are copied out.
    using (Bitmap bitmap = new Bitmap(src.Width, src.Height, PixelFormat.Format32bppRgb))
    using (Graphics grp = Graphics.FromImage(bitmap))
    {
        // ReleaseHdc must run even if DrawToHdc throws.
        IntPtr hdc = grp.GetHdc();
        try
        {
            BitmapConverter.DrawToHdc(src, hdc, new CvRect(new CvPoint(0, 150), new CvSize(src.Width, src.Height - 150)));
        }
        finally
        {
            grp.ReleaseHdc(hdc);
        }

        using (var font = new Font("맑은 고딕", 72))
        {
            grp.DrawString("안녕하세요.", font, Brushes.Red, 5, 5);
        }

        hdcgrahics.CopyFrom(bitmap);
    }

    return (hdcgrahics);
}
/// <summary>
/// Gets the face position near the skeleton's head via Haar-cascade detection.
/// </summary>
/// <param name="headPosition">Color-image coordinates of the skeleton's head.</param>
/// <returns>
/// Face rectangle in full-image coordinates; falls back to the head position
/// with the image2 dimensions when no face is found or detection fails.
/// </returns>
private Rect CheckFacePosition(ColorImagePoint headPosition)
{
    // Size of the region snipped around the head for detection.
    int snipWidth = 200;
    int snipHeight = 200;
    // Result Rect (defaults to the skeleton head coordinates and the image2 size).
    Rect reRect = new Rect(headPosition.X, headPosition.Y, image2.Width, image2.Height);

    storage.Clear();
    openCVGrayImage.ResetROI(); // Occasionally invoked with an ROI still set, so reset it first.
    openCVImage.CopyFrom(outputImage); // WriteableBitmap -> IplImage
    Cv.CvtColor(openCVImage, openCVGrayImage, ColorConversion.BgrToGray); // Convert to grayscale.
    Cv.EqualizeHist(openCVGrayImage, openCVGrayImage); // Equalize the histogram.

    // Face detection
    try
    {
        // Snip the search region around the head.
        var snipImage = SnipFaceImage(openCVGrayImage, headPosition, snipWidth, snipHeight);
        if (snipImage != null)
        {
            CvSeq<CvAvgComp> faces = Cv.HaarDetectObjects(snipImage, cascade, storage);
            // A face was detected.
            if (faces.Total > 0)
            {
                // Translate snip-local coordinates back to full-image coordinates:
                // the snip's origin is (headPosition - snip size / 2).
                reRect.X = faces[0].Value.Rect.X + (headPosition.X - snipWidth / 2);
                reRect.Y = faces[0].Value.Rect.Y + (headPosition.Y - snipHeight / 2);
                reRect.Width = faces[0].Value.Rect.Width;
                reRect.Height = faces[0].Value.Rect.Height;
            }
        }
    }
    catch (Exception)
    {
        // NOTE(review): swallows every exception and silently returns the
        // default rect — presumably a deliberate per-frame best-effort, but
        // consider logging; silent failures here are hard to diagnose.
    }
    return (reRect);
}