public Contour()
{
    // cvContourArea, cvArcLength
    // Computes the area of the region enclosed by a contour and the contour's length.
    const int SIZE = 500;

    // (1) Allocate and initialize the image
    using (CvMemStorage storage = new CvMemStorage())
    using (IplImage img = new IplImage(SIZE, SIZE, BitDepth.U8, 3))
    {
        img.Zero();

        // (2) Generate the point sequence
        CvSeq<CvPoint> points = new CvSeq<CvPoint>(SeqType.PolyLine, storage);
        CvRNG rng = new CvRNG((ulong)DateTime.Now.Ticks);
        double scale = rng.RandReal() + 0.5;
        CvPoint pt0 = new CvPoint
        {
            X = (int)(Math.Cos(0) * SIZE / 4 * scale + SIZE / 2),
            Y = (int)(Math.Sin(0) * SIZE / 4 * scale + SIZE / 2)
        };
        img.Circle(pt0, 2, CvColor.Green);
        points.Push(pt0);
        for (int i = 1; i < 20; i++)
        {
            scale = rng.RandReal() + 0.5;
            CvPoint pt1 = new CvPoint
            {
                X = (int)(Math.Cos(i * 2 * Math.PI / 20) * SIZE / 4 * scale + SIZE / 2),
                Y = (int)(Math.Sin(i * 2 * Math.PI / 20) * SIZE / 4 * scale + SIZE / 2)
            };
            img.Line(pt0, pt1, CvColor.Green, 2);
            pt0.X = pt1.X;
            pt0.Y = pt1.Y;
            img.Circle(pt0, 3, CvColor.Green, Cv.FILLED);
            points.Push(pt0);
        }
        img.Line(pt0, points.GetSeqElem(0).Value, CvColor.Green, 2);

        // (3) Compute the bounding rectangle, area, and length
        CvRect rect = points.BoundingRect(false);
        double area = points.ContourArea();
        double length = points.ArcLength(CvSlice.WholeSeq, 1);

        // (4) Draw the results onto the image
        img.Rectangle(new CvPoint(rect.X, rect.Y), new CvPoint(rect.X + rect.Width, rect.Y + rect.Height), CvColor.Red, 2);
        string text_area = string.Format("Area: rect={0}, contour={1}", rect.Width * rect.Height, area);
        string text_length = string.Format("Length: rect={0}, contour={1}", 2 * (rect.Width + rect.Height), length);
        using (CvFont font = new CvFont(FontFace.HersheySimplex, 0.7, 0.7, 0, 1, LineType.AntiAlias))
        {
            img.PutText(text_area, new CvPoint(10, img.Height - 30), font, CvColor.White);
            img.PutText(text_length, new CvPoint(10, img.Height - 10), font, CvColor.White);
        }

        // (5) Show the image; exit when a key is pressed
        using (CvWindow window = new CvWindow("BoundingRect", WindowMode.AutoSize))
        {
            window.Image = img;
            CvWindow.WaitKey(0);
        }
    }
}
public FitLine()
{
    // cvFitLine
    CvSize imageSize = new CvSize(500, 500);
    CvPoint2D32f[] points = GetRandomPoints(20, imageSize);
    CvLine2D line = Cv.FitLine2D(points, DistanceType.L2, 0, 0.01, 0.01);

    using (IplImage img = new IplImage(imageSize, BitDepth.U8, 3))
    {
        img.Zero();

        // draw line
        {
            CvPoint pt1, pt2;
            line.FitSize(img.Width, img.Height, out pt1, out pt2);
            img.Line(pt1, pt2, CvColor.Green, 1, LineType.Link8);
        }

        // draw points and distances
        using (CvFont font = new CvFont(FontFace.HersheySimplex, 0.33, 0.33))
        {
            foreach (CvPoint2D32f p in points)
            {
                double d = line.Distance(p);
                img.Circle(p, 2, CvColor.White, -1, LineType.AntiAlias);
                img.PutText(string.Format("{0:F1}", d), new CvPoint((int)(p.X + 3), (int)(p.Y + 3)), font, CvColor.Green);
            }
        }

        CvWindow.ShowImages(img);
    }
}
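// The sample above calls GetRandomPoints, which is not shown in this listing.
// A minimal sketch, assuming uniformly distributed points inside the image
// (the real helper may instead scatter points around a line to make the fit
// more interesting):
private static CvPoint2D32f[] GetRandomPoints(int count, CvSize imageSize)
{
    CvRNG rng = new CvRNG((ulong)DateTime.Now.Ticks);
    CvPoint2D32f[] points = new CvPoint2D32f[count];
    for (int i = 0; i < count; i++)
    {
        // x in [0, width), y in [0, height)
        points[i] = new CvPoint2D32f(
            (int)(rng.RandInt() % (uint)imageSize.Width),
            (int)(rng.RandInt() % (uint)imageSize.Height));
    }
    return points;
}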
public FaceDetect()
{
    CheckMemoryLeak();

    // CvHaarClassifierCascade, cvHaarDetectObjects
    CvColor[] colors = new CvColor[]
    {
        new CvColor(0, 0, 255),
        new CvColor(0, 128, 255),
        new CvColor(0, 255, 255),
        new CvColor(0, 255, 0),
        new CvColor(255, 128, 0),
        new CvColor(255, 255, 0),
        new CvColor(255, 0, 0),
        new CvColor(255, 0, 255),
    };

    const double Scale = 1.14;
    const double ScaleFactor = 1.0850;
    const int MinNeighbors = 2;

    using (IplImage img = new IplImage(FilePath.Image.Yalta, LoadMode.Color))
    using (IplImage smallImg = new IplImage(new CvSize(Cv.Round(img.Width / Scale), Cv.Round(img.Height / Scale)), BitDepth.U8, 1))
    {
        using (IplImage gray = new IplImage(img.Size, BitDepth.U8, 1))
        {
            Cv.CvtColor(img, gray, ColorConversion.BgrToGray);
            Cv.Resize(gray, smallImg, Interpolation.Linear);
            Cv.EqualizeHist(smallImg, smallImg);
        }

        using (var cascade = CvHaarClassifierCascade.FromFile(FilePath.Text.HaarCascade))
        using (var storage = new CvMemStorage())
        {
            storage.Clear();

            // Detect faces
            Stopwatch watch = Stopwatch.StartNew();
            CvSeq<CvAvgComp> faces = Cv.HaarDetectObjects(smallImg, cascade, storage, ScaleFactor, MinNeighbors, 0, new CvSize(30, 30));
            watch.Stop();
            Console.WriteLine("detection time = {0}ms\n", watch.ElapsedMilliseconds);

            // Draw a circle around each detected face
            for (int i = 0; i < faces.Total; i++)
            {
                CvRect r = faces[i].Value.Rect;
                CvPoint center = new CvPoint
                {
                    X = Cv.Round((r.X + r.Width * 0.5) * Scale),
                    Y = Cv.Round((r.Y + r.Height * 0.5) * Scale)
                };
                int radius = Cv.Round((r.Width + r.Height) * 0.25 * Scale);
                img.Circle(center, radius, colors[i % 8], 3, LineType.AntiAlias, 0);
            }
        }

        // Show the result in a window
        CvWindow.ShowImages(img);
    }
}
public HoughCircles()
{
    using (IplImage imgSrc = new IplImage(FilePath.Image.Walkman, LoadMode.Color))
    using (IplImage imgGray = new IplImage(imgSrc.Size, BitDepth.U8, 1))
    using (IplImage imgHough = imgSrc.Clone())
    {
        Cv.CvtColor(imgSrc, imgGray, ColorConversion.BgrToGray);
        Cv.Smooth(imgGray, imgGray, SmoothType.Gaussian, 9);
        //Cv.Canny(imgGray, imgGray, 75, 150, ApertureSize.Size3);

        using (var storage = new CvMemStorage())
        {
            CvSeq<CvCircleSegment> seq = imgGray.HoughCircles(storage, HoughCirclesMethod.Gradient, 1, 100, 150, 55, 0, 0);
            foreach (CvCircleSegment item in seq)
            {
                imgHough.Circle(item.Center, (int)item.Radius, CvColor.Red, 3);
            }
        }

        using (new CvWindow("gray", WindowMode.AutoSize, imgGray))
        using (new CvWindow("Hough circles", WindowMode.AutoSize, imgHough))
        {
            CvWindow.WaitKey(0);
        }
    }
}
/// <summary>
/// Plots the input feature points.
/// </summary>
/// <param name="points"></param>
/// <param name="id_array"></param>
private void Debug_DrawInputFeature(CvPoint2D32f[] points, int[] id_array)
{
    using (IplImage pointsPlot = Cv.CreateImage(new CvSize(300, 300), BitDepth.U8, 3))
    {
        pointsPlot.Zero();
        for (int i = 0; i < id_array.Length; i++)
        {
            int x = (int)(points[i].X * 300);
            int y = (int)(300 - points[i].Y * 300);
            int res = id_array[i];
            // CvColor color = (res == 1) ? CvColor.Red : CvColor.GreenYellow;
            CvColor color = new CvColor();
            if (res == 1)
            {
                color = CvColor.Red;
            }
            else if (res == 2)
            {
                color = CvColor.GreenYellow;
            }
            pointsPlot.Circle(x, y, 2, color, -1);
        }
        CvWindow.ShowImages(pointsPlot);
    }
}
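// Hypothetical usage of Debug_DrawInputFeature: the coordinates are assumed to
// be normalized to [0, 1] (they are scaled by the 300-pixel plot size above),
// with class ids 1 and 2; these sample values are illustrative only.
CvPoint2D32f[] features = { new CvPoint2D32f(0.25f, 0.30f), new CvPoint2D32f(0.80f, 0.65f) };
int[] classIds = { 1, 2 };
Debug_DrawInputFeature(features, classIds);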
public EyeDetect()
{
    CvColor[] colors = new CvColor[]
    {
        new CvColor(0, 0, 255),
        new CvColor(0, 128, 255),
        new CvColor(0, 255, 255),
        new CvColor(0, 255, 0),
        new CvColor(255, 128, 0),
        new CvColor(255, 255, 0),
        new CvColor(255, 0, 0),
        new CvColor(255, 0, 255),
    };

    const double Scale = 1.25;
    const double ScaleFactor = 2.5;
    const int MinNeighbors = 2;

    using (CvCapture cap = CvCapture.FromCamera(1))
    using (CvWindow w = new CvWindow("Eye Tracker"))
    {
        while (CvWindow.WaitKey(10) < 0)
        {
            using (IplImage img = cap.QueryFrame())
            using (IplImage smallImg = new IplImage(new CvSize(Cv.Round(img.Width / Scale), Cv.Round(img.Height / Scale)), BitDepth.U8, 1))
            {
                using (IplImage gray = new IplImage(img.Size, BitDepth.U8, 1))
                {
                    Cv.CvtColor(img, gray, ColorConversion.BgrToGray);
                    Cv.Resize(gray, smallImg, Interpolation.Linear);
                    Cv.EqualizeHist(smallImg, smallImg);
                }

                using (CvHaarClassifierCascade cascade = CvHaarClassifierCascade.FromFile("C:\\Program Files\\OpenCV\\data\\haarcascades\\haarcascade_eye.xml"))
                using (CvMemStorage storage = new CvMemStorage())
                {
                    storage.Clear();
                    Stopwatch watch = Stopwatch.StartNew();
                    CvSeq<CvAvgComp> eyes = Cv.HaarDetectObjects(smallImg, cascade, storage, ScaleFactor, MinNeighbors, 0, new CvSize(30, 30));
                    watch.Stop();
                    //Console.WriteLine("detection time = {0}ms\n", watch.ElapsedMilliseconds);

                    for (int i = 0; i < eyes.Total; i++)
                    {
                        CvRect r = eyes[i].Value.Rect;
                        CvPoint center = new CvPoint
                        {
                            X = Cv.Round((r.X + r.Width * 0.5) * Scale),
                            Y = Cv.Round((r.Y + r.Height * 0.5) * Scale)
                        };
                        int radius = Cv.Round((r.Width + r.Height) * 0.25 * Scale);
                        img.Circle(center, radius, colors[i % 8], 3, LineType.AntiAlias, 0);
                    }
                }

                w.Image = img;
            }
        }
    }
}
public System.Drawing.Bitmap FaceDetect(IplImage src)
{
    // CvHaarClassifierCascade, cvHaarDetectObjects
    // Use a cascade of Haar classifiers to detect faces.
    CvColor[] colors = new CvColor[]
    {
        new CvColor(0, 0, 255),
        new CvColor(0, 128, 255),
        new CvColor(0, 255, 255),
        new CvColor(0, 255, 0),
        new CvColor(255, 128, 0),
        new CvColor(255, 255, 0),
        new CvColor(255, 0, 0),
        new CvColor(255, 0, 255),
    };

    const double scale = 1.04;
    const double scaleFactor = 1.139;
    const int minNeighbors = 1;

    using (IplImage img = src.Clone())
    using (IplImage smallImg = new IplImage(new CvSize(Cv.Round(img.Width / scale), Cv.Round(img.Height / scale)), BitDepth.U8, 1))
    {
        // Create the image used for face detection.
        using (IplImage gray = new IplImage(img.Size, BitDepth.U8, 1))
        {
            Cv.CvtColor(img, gray, ColorConversion.BgrToGray);
            Cv.Resize(gray, smallImg, Interpolation.Linear);
            Cv.EqualizeHist(smallImg, smallImg);
        }

        using (CvHaarClassifierCascade cascade = CvHaarClassifierCascade.FromFile(Environment.CurrentDirectory + "\\" + "haarcascade_frontalface_alt.xml"))
        using (CvMemStorage storage = new CvMemStorage())
        {
            storage.Clear();

            // Detect faces.
            CvSeq<CvAvgComp> faces = Cv.HaarDetectObjects(smallImg, cascade, storage, scaleFactor, minNeighbors, 0, new CvSize(20, 20));

            // Cover each detected face with a filled black circle.
            for (int i = 0; i < faces.Total; i++)
            {
                CvRect r = faces[i].Value.Rect;
                CvPoint center = new CvPoint
                {
                    X = Cv.Round((r.X + r.Width * 0.5) * scale),
                    Y = Cv.Round((r.Y + r.Height * 0.5) * scale)
                };
                int radius = Cv.Round((r.Width + r.Height) * 0.25 * scale);
                img.Circle(center, radius, new CvColor(0, 0, 0), -1, LineType.Link8, 0);
            }
        }

        FindFace = img.Clone();
        // Convert the resulting IplImage to a Bitmap and return it.
        return FindFace.ToBitmap(System.Drawing.Imaging.PixelFormat.Format24bppRgb);
    }
}
public Watershed()
{
    // cvWatershed
    // Click to place the centers of circular markers (seed regions); multiple markers can be set.
    // The markers are grown along the image gradient, and the image is segmented at the
    // boundaries that form where the gradient is high.
    // The image is split into as many regions as there are markers.

    // (2) Load the image, initialize the marker image, and allocate an image for displaying results
    using (IplImage srcImg = new IplImage(Const.ImageGoryokaku, LoadMode.AnyDepth | LoadMode.AnyColor))
    using (IplImage dstImg = srcImg.Clone())
    using (IplImage dspImg = srcImg.Clone())
    using (IplImage markers = new IplImage(srcImg.Size, BitDepth.S32, 1))
    {
        markers.Zero();

        // (3) Show the input image and register a mouse callback for specifying seed components
        using (CvWindow wImage = new CvWindow("image", WindowMode.AutoSize))
        {
            wImage.Image = srcImg;

            // A click specifies the center of a circular seed region
            int seedNum = 0;
            wImage.OnMouseCallback += delegate(MouseEvent ev, int x, int y, MouseEvent flags)
            {
                if (ev == MouseEvent.LButtonDown)
                {
                    seedNum++;
                    CvPoint pt = new CvPoint(x, y);
                    markers.Circle(pt, 20, CvScalar.ScalarAll(seedNum), Cv.FILLED, LineType.Link8, 0);
                    dspImg.Circle(pt, 20, CvColor.White, 3, LineType.Link8, 0);
                    wImage.Image = dspImg;
                }
            };
            CvWindow.WaitKey();
        }

        // (4) Run the watershed segmentation
        Cv.Watershed(srcImg, markers);

        // (5) Mark the watershed boundaries (pixel value = -1) on the result image
        for (int i = 0; i < markers.Height; i++)
        {
            for (int j = 0; j < markers.Width; j++)
            {
                int idx = (int)(markers.Get2D(i, j).Val0);
                if (idx == -1)
                {
                    dstImg.Set2D(i, j, CvColor.Red);
                }
            }
        }

        using (CvWindow wDst = new CvWindow("watershed transform", WindowMode.AutoSize))
        {
            wDst.Image = dstImg;
            CvWindow.WaitKey();
        }
    }
}
private void 얼굴검출ToolStripMenuItem_Click(object sender, EventArgs e)
{
    CvColor[] colors = new CvColor[]
    {
        new CvColor(0, 0, 255),
        new CvColor(0, 128, 255),
        new CvColor(0, 255, 255),
        new CvColor(0, 255, 0),
        new CvColor(255, 128, 0),
        new CvColor(255, 255, 0),
        new CvColor(255, 0, 0),
        new CvColor(255, 0, 255),
    };

    const double scale = 1.04;
    const double scaleFactor = 1.139;
    const int minNeighbors = 2;

    using (IplImage img = src.Clone())
    using (IplImage smallImg = new IplImage(new CvSize(Cv.Round(img.Width / scale), Cv.Round(img.Height / scale)), BitDepth.U8, 1))
    {
        // Create the image used for face detection
        using (IplImage gray = new IplImage(img.Size, BitDepth.U8, 1))
        {
            Cv.CvtColor(img, gray, ColorConversion.BgrToGray);
            Cv.Resize(gray, smallImg, Interpolation.Linear);
            Cv.EqualizeHist(smallImg, smallImg);
        }

        //using (CvHaarClassifierCascade cascade = Cv.Load<CvHaarClassifierCascade>(Const.XmlHaarcascade)) // either way works
        using (CvHaarClassifierCascade cascade = CvHaarClassifierCascade.FromFile(Application.StartupPath + "\\" + "haarcascade_frontalface_alt.xml"))
        using (CvMemStorage storage = new CvMemStorage())
        {
            storage.Clear();

            // Detect faces
            CvSeq<CvAvgComp> faces = Cv.HaarDetectObjects(smallImg, cascade, storage, scaleFactor, minNeighbors, 0, new CvSize(30, 30), new CvSize(180, 180));

            // Draw a circle around each detected face
            for (int i = 0; i < faces.Total; i++)
            {
                CvRect r = faces[i].Value.Rect;
                CvPoint center = new CvPoint
                {
                    X = Cv.Round((r.X + r.Width * 0.5) * scale),
                    Y = Cv.Round((r.Y + r.Height * 0.5) * scale)
                };
                int radius = Cv.Round((r.Width + r.Height) * 0.25 * scale);
                img.Circle(center, radius, colors[i % 8], 3, LineType.AntiAlias, 0);
            }
        }

        dst = img.Clone();
        pictureBoxIpl2.ImageIpl = dst;
    }
}
// Update is called once per frame
void Update()
{
    IplImage frame = Cv.QueryFrame(capture);

    using (IplImage img = Cv.CloneImage(frame))
    using (IplImage smallImg = new IplImage(new CvSize(Cv.Round(img.Width / Scale), Cv.Round(img.Height / Scale)), BitDepth.U8, 1))
    {
        using (IplImage gray = new IplImage(img.Size, BitDepth.U8, 1))
        {
            Cv.CvtColor(img, gray, ColorConversion.BgrToGray);
            Cv.Resize(gray, smallImg, Interpolation.Linear);
            Cv.EqualizeHist(smallImg, smallImg);
        }

        using (CvMemStorage storage = new CvMemStorage())
        {
            storage.Clear();
            CvSeq<CvAvgComp> faces = Cv.HaarDetectObjects(smallImg, cascade, storage, ScaleFactor, MinNeighbors, 0, new CvSize(64, 64));
            for (int i = 0; i < faces.Total; i++)
            {
                CvRect r = faces[i].Value.Rect;
                CvPoint center = new CvPoint
                {
                    X = Cv.Round((r.X + r.Width * 0.5) * Scale),
                    Y = Cv.Round((r.Y + r.Height * 0.5) * Scale)
                };
                int radius = Cv.Round((r.Width + r.Height) * 0.25 * Scale);
                img.Circle(center, radius, colors[i % 8], 3, LineType.AntiAlias, 0);
            }

            if (faces.Total > 0)
            {
                CvRect r = faces[0].Value.Rect;
                facepos = new Vector2((r.X + r.Width / 2.0f) / CAPTURE_WIDTH, (r.Y + r.Height / 2.0f) / CAPTURE_HEIGHT);
            }
            else
            {
                facepos = Vector2.zero;
            }

            // Note: the original tested facepos.x twice; the last condition
            // presumably should be against facepos.y, as written here.
            if (facepos.x >= 0.2 && facepos.x <= 0.7 && facepos.y >= 0.2 && facepos.y <= 0.7)
            {
                isFaceInCapture = true;
            }
            else
            {
                isFaceInCapture = false;
            }
        }

        Cv.ShowImage("FaceDetect", img);
    }
}
///////////////////////
public static IplImage FaceDetect(IplImage src)
{
    IplImage FindFace;

    // CvHaarClassifierCascade, cvHaarDetectObjects
    // Use a cascade of Haar classifiers to detect faces.
    CvColor[] colors = new CvColor[]
    {
        new CvColor(0, 0, 255),
        new CvColor(0, 128, 255),
        new CvColor(0, 255, 255),
        new CvColor(0, 255, 0),
        new CvColor(255, 128, 0),
        new CvColor(255, 255, 0),
        new CvColor(255, 0, 0),
        new CvColor(255, 0, 255),
    };

    const double scale = 1;
    const double scaleFactor = 1.139;
    const int minNeighbors = 2;

    IplImage img = src.Clone();
    // The small image must be single-channel: it receives the resized grayscale
    // image below. (The original allocated 3 channels here, which would make
    // Cv.Resize from the 1-channel gray image fail at runtime.)
    IplImage smallImg = new IplImage(new CvSize(Cv.Round(img.Width / scale), Cv.Round(img.Height / scale)), BitDepth.U8, 1);
    {
        // Create the image used for face detection
        using (IplImage gray = new IplImage(img.Size, BitDepth.U8, 1))
        {
            Cv.CvtColor(img, gray, ColorConversion.BgrToGray);
            Cv.Resize(gray, smallImg, Interpolation.Linear);
            Cv.EqualizeHist(smallImg, smallImg);
        }

        using (CvHaarClassifierCascade cascade = CvHaarClassifierCascade.FromFile("C:\\haarcascade_frontalface_default.xml"))
        using (CvMemStorage storage = new CvMemStorage())
        {
            storage.Clear();

            // Detect faces
            CvSeq<CvAvgComp> faces = Cv.HaarDetectObjects(smallImg, cascade, storage, scaleFactor, minNeighbors, 0, new CvSize(24, 24));

            // Draw a circle around each detected face
            for (int i = 0; i < faces.Total; i++)
            {
                CvRect r = faces[i].Value.Rect;
                CvPoint center = new CvPoint
                {
                    X = Cv.Round((r.X + r.Width * 0.5) * scale),
                    Y = Cv.Round((r.Y + r.Height * 0.5) * scale)
                };
                int radius = Cv.Round((r.Width + r.Height) * 0.25 * scale);
                img.Circle(center, radius, colors[i % 8], 3, LineType.AntiAlias, 0);
            }
        }

        FindFace = img.Clone();
        return FindFace;
    }
}
public IplImage PostProcess(IplImage preProcessedImage, IplImage postProcessedImage)
{
    using (CvMemStorage storage = new CvMemStorage())
    {
        CvSeq seq = preProcessedImage.HoughLines2(storage, HoughLinesMethod.Probabilistic, 1, Math.PI / 180, 30, 40, 15);
        var lines = new List<CvLineSegmentPoint>();
        for (int i = 0; i < seq.Total; i++)
        {
            var cvLineSegmentPoint = seq.GetSeqElem<CvLineSegmentPoint>(i);
            if (cvLineSegmentPoint != null)
            {
                lines.Add(cvLineSegmentPoint.Value);
            }
        }

        var groupedLines = RectangleFinder.GroupSegments(lines);
        var rects = RectangleFinder.Convert(groupedLines);
        RectangleFinder.Filter(rects);
        foreach (var cvRect in rects)
        {
            postProcessedImage.Rectangle(cvRect, CvColor.Red, 3, LineType.AntiAlias);
        }

        //for (int i = 0; i < groupedLines.Count; i++)
        //{
        //    var color = new CvColor(i*255/max, i*255/max, i*255/max);
        //    var group = groupedLines[i];
        //    for (int j = 0; j < group.Lines.Count; j++)
        //    {
        //        CvLineSegmentPoint elem = group.Lines[j];
        //        imgHough.Line(elem.P1, elem.P2, color, 3, LineType.AntiAlias, 0);
        //    }
        //}
        //Console.WriteLine(groupedLines.Count);

        CvSeq<CvCircleSegment> seq1 = preProcessedImage.HoughCircles(storage, HoughCirclesMethod.Gradient, 1,
            //imgGray.Size.Height / 8, 150, 55, 0, 50);
            15, 100, 30, 9, 51);
        foreach (CvCircleSegment item in seq1)
        {
            postProcessedImage.Circle(item.Center, (int)item.Radius, CvColor.Red, 3);
        }
    }
    return postProcessedImage;
}
public Watershed()
{
    using (var srcImg = new IplImage(FilePath.Image.Goryokaku, LoadMode.AnyDepth | LoadMode.AnyColor))
    using (var dstImg = srcImg.Clone())
    using (var dspImg = srcImg.Clone())
    using (var markers = new IplImage(srcImg.Size, BitDepth.S32, 1))
    {
        markers.Zero();

        using (var window = new CvWindow("image", WindowMode.AutoSize))
        {
            window.Image = srcImg;

            // Mouse event
            int seedNum = 0;
            window.OnMouseCallback += delegate(MouseEvent ev, int x, int y, MouseEvent flags)
            {
                if (ev == MouseEvent.LButtonDown)
                {
                    seedNum++;
                    CvPoint pt = new CvPoint(x, y);
                    markers.Circle(pt, 20, CvScalar.ScalarAll(seedNum), Cv.FILLED, LineType.Link8, 0);
                    dspImg.Circle(pt, 20, CvColor.White, 3, LineType.Link8, 0);
                    window.Image = dspImg;
                }
            };
            CvWindow.WaitKey();
        }

        Cv.Watershed(srcImg, markers);

        // draws watershed
        for (int i = 0; i < markers.Height; i++)
        {
            for (int j = 0; j < markers.Width; j++)
            {
                int idx = (int)(markers.Get2D(i, j).Val0);
                if (idx == -1)
                {
                    dstImg.Set2D(i, j, CvColor.Red);
                }
            }
        }

        using (CvWindow wDst = new CvWindow("watershed transform", WindowMode.AutoSize))
        {
            wDst.Image = dstImg;
            CvWindow.WaitKey();
        }
    }
}
public BoundingRect()
{
    // cvBoundingRect
    // Finds the rectangle that bounds a set of points

    // (1) Allocate and initialize the image and memory storage
    //     (the storage is unnecessary if CvSeq is not used)
    using (IplImage img = new IplImage(640, 480, BitDepth.U8, 3))
    using (CvMemStorage storage = new CvMemStorage(0))
    {
        img.Zero();
        CvRNG rng = new CvRNG(DateTime.Now);

        // (2) Generate the points
        ///*
        // The easy way (use a plain array)
        CvPoint[] points = new CvPoint[50];
        for (int i = 0; i < 50; i++)
        {
            points[i] = new CvPoint()
            {
                X = (int)(rng.RandInt() % (img.Width / 2) + img.Width / 4),
                Y = (int)(rng.RandInt() % (img.Height / 2) + img.Height / 4)
            };
            img.Circle(points[i], 3, new CvColor(0, 255, 0), Cv.FILLED);
        }
        //*/
        /*
        // The way the original C sample does it (use CvSeq)
        CvSeq points = new CvSeq(SeqType.EltypePoint, CvSeq.SizeOf, CvPoint.SizeOf, storage);
        for (int i = 0; i < 50; i++)
        {
            CvPoint pt = new CvPoint();
            pt.X = (int)(rng.RandInt() % (img.Width / 2) + img.Width / 4);
            pt.Y = (int)(rng.RandInt() % (img.Height / 2) + img.Height / 4);
            points.Push(pt);
            img.Circle(pt, 3, new CvColor(0, 255, 0), Cv.FILLED);
        }
        //*/

        // (3) Find the bounding rectangle of the points and draw it
        CvRect rect = Cv.BoundingRect(points);
        img.Rectangle(new CvPoint(rect.X, rect.Y), new CvPoint(rect.X + rect.Width, rect.Y + rect.Height), new CvColor(255, 0, 0), 2);

        // (4) Show the image; exit when a key is pressed
        using (CvWindow w = new CvWindow("BoundingRect", WindowMode.AutoSize, img))
        {
            CvWindow.WaitKey(0);
        }
    }
}
public Blob()
{
    using (var imgSrc = new IplImage(FilePath.Image.Shapes, LoadMode.Color))
    using (var imgBinary = new IplImage(imgSrc.Size, BitDepth.U8, 1))
    using (var imgRender = new IplImage(imgSrc.Size, BitDepth.U8, 3))
    using (var imgContour = new IplImage(imgSrc.Size, BitDepth.U8, 3))
    using (var imgPolygon = new IplImage(imgSrc.Size, BitDepth.U8, 3))
    {
        Cv.CvtColor(imgSrc, imgBinary, ColorConversion.BgrToGray);
        Cv.Threshold(imgBinary, imgBinary, 100, 255, ThresholdType.Binary);

        CvBlobs blobs = new CvBlobs();
        blobs.Label(imgBinary);

        foreach (KeyValuePair<int, CvBlob> item in blobs)
        {
            CvBlob b = item.Value;
            Console.WriteLine("{0} | Centroid:{1} Area:{2}", item.Key, b.Centroid, b.Area);

            CvContourChainCode cc = b.Contour;
            cc.Render(imgContour);

            CvContourPolygon polygon = cc.ConvertToPolygon();
            foreach (CvPoint p in polygon)
            {
                imgPolygon.Circle(p, 1, CvColor.Red, -1);
            }

            /*
            CvPoint2D32f circleCenter;
            float circleRadius;
            GetEnclosingCircle(polygon, out circleCenter, out circleRadius);
            imgPolygon.Circle(circleCenter, (int)circleRadius, CvColor.Green, 2);
            */
        }

        blobs.RenderBlobs(imgSrc, imgRender);

        using (new CvWindow("render", imgRender))
        using (new CvWindow("contour", imgContour))
        using (new CvWindow("polygon vertices", imgPolygon))
        {
            Cv.WaitKey(0);
        }
    }
}
// Update is called once per frame
void Update()
{
    IplImage frame = Cv.QueryFrame(capture);

    imgBinary = new IplImage(frame.Size, BitDepth.U8, 1);
    imgLabel = new IplImage(frame.Size, BitDepth.F32, 1);
    imgRender = new IplImage(frame.Size, BitDepth.U8, 3);
    imgContour = new IplImage(frame.Size, BitDepth.U8, 3);
    imgPolygon = new IplImage(frame.Size, BitDepth.U8, 3);

    Color[] cols = new Color[texture.width * texture.height];

    Cv.CvtColor(frame, imgBinary, ColorConversion.BgrToGray);
    Cv.Threshold(imgBinary, imgBinary, 100, 255, ThresholdType.Binary);

    CvBlobs blobs = new CvBlobs();
    uint result = blobs.Label(imgBinary, imgLabel);
    foreach (KeyValuePair<uint, CvBlob> item in blobs)
    {
        CvBlob b = item.Value;
        //Console.WriteLine("{0} | Centroid:{1} Area:{2}", item.Key, b.Centroid, b.Area);

        CvContourChainCode cc = b.Contour;
        cc.RenderContourChainCode(imgContour);

        CvContourPolygon polygon = cc.ConvertChainCodesToPolygon();
        foreach (CvPoint p in polygon)
        {
            imgPolygon.Circle(p, 1, CvColor.Red, -1);
        }
    }

    blobs.RenderBlobs(imgLabel, frame, imgRender);

    for (int y = 0; y < texture.height; y++)
    {
        for (int x = 0; x < texture.width; x++)
        {
            CvColor col = imgRender.Get2D(y, x);
            cols[y * texture.width + x] = new Color(col.R / 255.0f, col.G / 255.0f, col.B / 255.0f, 1.0f);
        }
    }
    //int t2 = System.Environment.TickCount;
    texture.SetPixels(cols);
    //int t3 = System.Environment.TickCount;
    //Debug.Log("t2-t1=" + (t2 - t1) + " t3-t2=" + (t3 - t2));
    texture.Apply();
}
/// <summary>
/// Extracts MSER by C-style code (cvExtractMSER)
/// </summary>
/// <param name="imgGray"></param>
/// <param name="imgDst"></param>
private void CStyleMSER(IplImage imgGray, IplImage imgDst)
{
    using (CvMemStorage storage = new CvMemStorage())
    {
        CvContour[] contours;
        CvMSERParams param = new CvMSERParams();
        Cv.ExtractMSER(imgGray, null, out contours, storage, param);

        foreach (CvContour c in contours)
        {
            CvColor color = CvColor.Random();
            for (int i = 0; i < c.Total; i++)
            {
                imgDst.Circle(c[i].Value, 1, color);
            }
        }
    }
}
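// Hypothetical usage of CStyleMSER; "input.png" stands in for whatever source
// image the surrounding sample actually loads. The grayscale copy is analyzed
// and each MSER region is drawn onto the color copy in a random color.
using (IplImage imgGray = new IplImage("input.png", LoadMode.GrayScale))
using (IplImage imgDst = new IplImage("input.png", LoadMode.Color))
{
    CStyleMSER(imgGray, imgDst);
    CvWindow.ShowImages(imgDst);
}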
public IplImage FaceDetection(IplImage src)
{
    // https://076923.github.io/posts/C-opencv-29/
    haarface = new IplImage(src.Size, BitDepth.U8, 3); // haarface is a copy of the source image
    Cv.Copy(src, haarface);

    const double scale = 0.9;          // ratio of the detection image to the source
    const double scaleFactor = 1.139;  // constant used during face detection
    const int minNeighbors = 1;        // constant used during face detection

    using (IplImage Detected_image = new IplImage(new CvSize(Cv.Round(src.Width / scale), Cv.Round(src.Height / scale)), BitDepth.U8, 1))
    {
        // Resize the detection image according to the scale ratio
        using (IplImage gray = new IplImage(src.Size, BitDepth.U8, 1)) // grayscale working image
        {
            Cv.CvtColor(src, gray, ColorConversion.BgrToGray);
            Cv.Resize(gray, Detected_image, Interpolation.Linear);
            Cv.EqualizeHist(Detected_image, Detected_image); // equalize the histogram (balances dark and bright areas)
        }

        using (CvHaarClassifierCascade cascade = CvHaarClassifierCascade.FromFile("../../haarcascade_frontalface_alt.xml"))
        using (CvMemStorage storage = new CvMemStorage()) // create a memory storage
        {
            // Detected_image = image to search, storage = where results are stored,
            // HaarDetectionType = operation mode
            CvSeq<CvAvgComp> faces = Cv.HaarDetectObjects(Detected_image, cascade, storage, scaleFactor, minNeighbors,
                HaarDetectionType.FindBiggestObject, new CvSize(90, 90), new CvSize(0, 0));

            if (faces.Total == 1 && checking == false)
            {
                checking = true;
                send_picture();
            }

            for (int i = 0; i < faces.Total; i++)
            {
                CvRect r = faces[i].Value.Rect;
                CvPoint center = new CvPoint
                {
                    X = Cv.Round((r.X + r.Width * 0.5) * scale),
                    Y = Cv.Round((r.Y + r.Height * 0.5) * scale)
                };
                int radius = Cv.Round((r.Width + r.Height) * 0.25 * scale);
                haarface.Circle(center, radius, CvColor.Black, 3, LineType.AntiAlias, 0);
            }
        }
        return haarface;
    }
}
public Blob()
{
    using (IplImage imgSrc = new IplImage(Const.ImageShapes, LoadMode.Color))
    using (IplImage imgBinary = new IplImage(imgSrc.Size, BitDepth.U8, 1))
    using (IplImage imgLabel = new IplImage(imgSrc.Size, BitDepth.F32, 1))
    using (IplImage imgRender = new IplImage(imgSrc.Size, BitDepth.U8, 3))
    using (IplImage imgContour = new IplImage(imgSrc.Size, BitDepth.U8, 3))
    using (IplImage imgPolygon = new IplImage(imgSrc.Size, BitDepth.U8, 3))
    {
        Cv.CvtColor(imgSrc, imgBinary, ColorConversion.BgrToGray);
        Cv.Threshold(imgBinary, imgBinary, 100, 255, ThresholdType.Binary);

        CvBlobs blobs = new CvBlobs();
        blobs.Label(imgBinary);

        foreach (KeyValuePair<int, CvBlob> item in blobs)
        {
            CvBlob b = item.Value;
            Console.WriteLine("{0} | Centroid:{1} Area:{2}", item.Key, b.Centroid, b.Area);

            CvContourChainCode cc = b.Contour;
            cc.Render(imgContour);

            CvContourPolygon polygon = cc.ConvertToPolygon();
            foreach (CvPoint p in polygon)
            {
                imgPolygon.Circle(p, 1, CvColor.Red, -1);
            }
        }

        blobs.RenderBlobs(imgSrc, imgRender);

        using (new CvWindow("render", imgRender))
        using (new CvWindow("contour", imgContour))
        using (new CvWindow("polygon vertices", imgPolygon))
        {
            Cv.WaitKey(0);
        }
    }
}
/// <summary>
/// Draws a subdivision point on the image.
/// </summary>
/// <param name="img"></param>
/// <param name="fp"></param>
/// <param name="color"></param>
private void DrawSubdivPoint(IplImage img, CvPoint2D32f fp, CvColor color)
{
    img.Circle(fp, 3, color, Cv.FILLED, LineType.AntiAlias, 0);
}
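// Hypothetical usage: marking a vertex of a planar subdivision on an image
// already in scope as img (the coordinates here are illustrative only).
DrawSubdivPoint(img, new CvPoint2D32f(120.5f, 80.25f), CvColor.Yellow);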
/// <summary>
/// Classical Multidimensional Scaling
/// </summary>
public MDS()
{
    // creates distance matrix
    int size = CityDistance.GetLength(0);
    CvMat t = new CvMat(size, size, MatrixType.F64C1, CityDistance);
    // adds Torgerson's additive constant to t
    t += Torgerson(t);
    // squares all elements of t
    t.Mul(t, t);
    // centering matrix G
    CvMat g = CenteringMatrix(size);
    // calculates inner product matrix B
    CvMat b = g * t * g.T() * -0.5;
    // calculates eigenvalues and eigenvectors of B
    CvMat vectors = new CvMat(size, size, MatrixType.F64C1);
    CvMat values = new CvMat(size, 1, MatrixType.F64C1);
    Cv.EigenVV(b, vectors, values);
    for (int r = 0; r < values.Rows; r++)
    {
        if (values[r] < 0)
        {
            values[r] = 0;
        }
    }
    // multiplies sqrt(eigenvalue) by eigenvector
    CvMat result = vectors.GetRows(0, 2);
    for (int r = 0; r < result.Rows; r++)
    {
        for (int c = 0; c < result.Cols; c++)
        {
            result[r, c] *= Math.Sqrt(values[r]);
        }
    }
    // scaling
    Cv.Normalize(result, result, 0, 800, NormType.MinMax);
    //Console.WriteLine(vectors);
    //Console.WriteLine(values);
    //Console.WriteLine(result);

    // opens a window
    using (IplImage img = new IplImage(800, 600, BitDepth.U8, 3))
    using (CvFont font = new CvFont(FontFace.HersheySimplex, 0.5f, 0.5f))
    using (CvWindow window = new CvWindow("City Location Estimation"))
    {
        img.Zero();
        for (int c = 0; c < size; c++)
        {
            double x = result[0, c];
            double y = result[1, c];
            x = x * 0.7 + img.Width * 0.1;
            y = y * 0.7 + img.Height * 0.1;
            img.Circle((int)x, (int)y, 5, CvColor.Red, -1);
            img.PutText(CityNames[c], new CvPoint((int)x + 5, (int)y + 10), font, CvColor.White);
        }
        window.Image = img;
        Cv.WaitKey();
    }
}
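// The sample above references CenteringMatrix, which is not shown in this
// listing. A minimal sketch, assuming the standard double-centering matrix of
// classical MDS: G = I - (1/n) * J, where J is the n-by-n all-ones matrix.
// (Torgerson and the CityDistance/CityNames data remain external.)
private static CvMat CenteringMatrix(int n)
{
    CvMat g = new CvMat(n, n, MatrixType.F64C1);
    for (int r = 0; r < n; r++)
    {
        for (int c = 0; c < n; c++)
        {
            // identity minus the constant 1/n in every cell
            g[r, c] = (r == c ? 1.0 : 0.0) - 1.0 / n;
        }
    }
    return g;
}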
void timer2_test(object sender, EventArgs e) /* face recognition (1 frame) */
{
    decide_frontface_csv = 1;
    var bmp = videoSourcePlayer1.GetCurrentVideoFrame();
    bmp.Save("a.bmp");

    CvColor[] colors = new CvColor[]
    {
        new CvColor(0, 0, 255),
        new CvColor(0, 128, 255),
        new CvColor(0, 255, 255),
        new CvColor(0, 255, 0),
        new CvColor(255, 128, 0),
        new CvColor(255, 255, 0),
        new CvColor(255, 0, 0),
        new CvColor(255, 0, 255),
    };

    const double Scale = 1.14;
    const double ScaleFactor = 1.0850;
    const int MinNeighbors = 2;

    //using (IplImage img = new IplImage(@"C:\Yalta.jpg", LoadMode.Color))
    using (IplImage img = new IplImage(@"a.bmp", LoadMode.Color))
    //using (IplImage img = new IplImage(@"C:\Lenna.jpg", LoadMode.Color))
    using (IplImage smallImg = new IplImage(new CvSize(Cv.Round(img.Width / Scale), Cv.Round(img.Height / Scale)), BitDepth.U8, 1))
    {
        // Create the image used for face detection
        using (IplImage gray = new IplImage(img.Size, BitDepth.U8, 1))
        {
            Cv.CvtColor(img, gray, ColorConversion.BgrToGray);
            Cv.Resize(gray, smallImg, Interpolation.Linear);
            Cv.EqualizeHist(smallImg, smallImg);
        }

        //using (CvHaarClassifierCascade cascade = Cv.Load<CvHaarClassifierCascade>(Const.XmlHaarcascade)) // either way works
        using (CvHaarClassifierCascade cascade = CvHaarClassifierCascade.FromFile("haarcascade_frontalface_default.xml"))
        using (CvMemStorage storage = new CvMemStorage())
        {
            storage.Clear();

            // Detect faces
            Stopwatch watch = Stopwatch.StartNew();
            CvSeq<CvAvgComp> faces = Cv.HaarDetectObjects(smallImg, cascade, storage, ScaleFactor, MinNeighbors, 0,
                new CvSize(100, 100), new CvSize(1000, 1000)); //new CvSize(30, 30)
            watch.Stop();
            //Console.WriteLine("detection time = {0}ms\n", watch.ElapsedMilliseconds);

            // Draw a circle on each detection
            for (int i = 0; i < faces.Total; i++)
            {
                CvRect r = faces[i].Value.Rect;
                CvPoint center = new CvPoint
                {
                    X = Cv.Round((r.X + r.Width * 0.5) * Scale),
                    Y = Cv.Round((r.Y + r.Height * 0.5) * Scale)
                };
                int radius = Cv.Round((r.Width + r.Height) * 0.25 * Scale);
                img.Circle(center, radius, colors[i % 8], 3, LineType.AntiAlias, 0);
                if (radius >= 50)
                {
                    frontface_check_num++;
                    break;
                }
                System.Console.WriteLine("radius = {0}!!!!!!!!!!!!\n", radius);
            }
        }
        // CvWindow.ShowImages(img);
    }
    frametest++;
}
// Worker-thread processing (capture)
private void worker_DoWork(object sender, DoWorkEventArgs e)
{
    BackgroundWorker bw = (BackgroundWorker)sender;
    Stopwatch sw = new Stopwatch();
    string str;
    id = 0;

    // UDP for sending PID data
    // local port number to bind
    // FSI_PID_DATA pid_data = new FSI_PID_DATA();
    int localPort = mmFsiUdpPortMT3PV;
    System.Net.Sockets.UdpClient udpc2 = null;
    /*
    try
    {
        udpc2 = new System.Net.Sockets.UdpClient(localPort);
    }
    catch (Exception ex)
    {
        // display via an anonymous delegate
        this.Invoke(new dlgSetString(ShowRText), new object[] { richTextBox1, ex.ToString() });
    }
    */

    // videoInput object
    const int DeviceID = 0;    // 0; // 3 (pro), 4 (piccolo), 7 (DMK)
    const int CaptureFps = 30; // 30
    int interval = (int)(1000 / CaptureFps / 10);
    const int CaptureWidth = 640;
    const int CaptureHeight = 480;
    // number of images to save
    int mmFsiPostRec = 60;
    int save_counter = mmFsiPostRec;

    using (VideoInput vi = new VideoInput())
    {
        vi.SetIdealFramerate(DeviceID, CaptureFps);
        vi.SetupDevice(DeviceID, CaptureWidth, CaptureHeight);
        int width = vi.GetWidth(DeviceID);
        int height = vi.GetHeight(DeviceID);

        using (IplImage img = new IplImage(width, height, BitDepth.U8, 3))
        using (IplImage img_dark8 = Cv.LoadImage(@"C:\piccolo\MT3V_dark.bmp", LoadMode.GrayScale))
        //using (IplImage img_dark = new IplImage(width, height, BitDepth.U8, 3))
        using (IplImage img_mono = new IplImage(width, height, BitDepth.U8, 1))
        using (IplImage img2 = new IplImage(width, height, BitDepth.U8, 1))
        //using (Bitmap bitmap = new Bitmap(width, height, PixelFormat.Format24bppRgb))
        using (CvFont font = new CvFont(FontFace.HersheyComplex, 0.45, 0.45))
        //using (CvWindow window0 = new CvWindow("FIFO0", WindowMode.AutoSize))
        {
            //this.Size = new Size(width + 12, height + 148);
            double min_val, max_val;
            CvPoint min_loc, max_loc;
            int size = 15;
            int size2x = size / 2;
            int size2y = size / 2;
            int crop = 20;
            double sigma = 3;
            long elapsed0 = 0, elapsed1 = 0;
            double framerate0 = 0, framerate1 = 0;
            double alfa_fr = 0.99;

            sw.Start();
            while (bw.CancellationPending == false)
            {
                if (vi.IsFrameNew(DeviceID))
                {
                    DateTime dn = DateTime.Now; // capture time
                    vi.GetPixels(DeviceID, img.ImageData, false, true);

                    // draw the time on the frame
                    str = String.Format("Wide ID:{0:D2} ", id) + dn.ToString("yyyyMMdd_HHmmss_fff"); // +String.Format(" ({0,000:F2},{1,000:F2}) ({2,000:0},{3,000:0})({4,0:F1})", gx, gy, max_loc.X, max_loc.Y, max_val);
                    img.PutText(str, new CvPoint(10, 475), font, new CvColor(0, 100, 40));
                    Cv.CvtColor(img, img_mono, ColorConversion.BgrToGray);
                    Cv.Sub(img_mono, img_dark8, imgdata.img); // subtract the dark frame
                    imgdata.id = ++id;
                    imgdata.t = dn;
                    imgdata.ImgSaveFlag = !(ImgSaveFlag != 0); // int -> bool conversion
                    if (fifo.Count == MaxFrame - 1)
                    {
                        fifo.EraseLast();
                    }
                    fifo.InsertFirst(imgdata);

                    #region Position detection 1 // MinMaxLoc
                    /*
                    // position detection
                    Cv.Smooth(imgdata.img, img2, SmoothType.Gaussian, size, 0, sigma, 0);
                    CvRect rect;
                    if (PvMode == MyDETECT)
                    {
                        rect = new CvRect((int)(gx + 0.5) - size, (int)(gy + 0.5) - size, size * 2, size * 2);
                        Cv.SetImageROI(img2, rect);
                        Cv.MinMaxLoc(img2, out min_val, out max_val, out min_loc, out max_loc, null);
                        Cv.ResetImageROI(img2);
                        max_loc.X += (int)(gx + 0.5) - size; // +1 because the reference point is (1,1)
                        max_loc.Y += (int)(gy + 0.5) - size;
                    }
                    else
                    {
                        rect = new CvRect(crop, crop, width - (crop + crop), height - (crop + crop));
                        Cv.SetImageROI(img2, rect);
                        Cv.MinMaxLoc(img2, out min_val, out max_val, out min_loc, out max_loc, null);
                        Cv.ResetImageROI(img2);
                        max_loc.X += crop; // +1 because the reference point is (1,1)
                        max_loc.Y += crop;
                    }
                    window0.ShowImage(img2);

                    double m00, m10, m01;
                    size2x = size2y = size / 2;
                    if (max_loc.X - size2x < 0) size2x = max_loc.X;
                    if (max_loc.Y - size2y < 0) size2y = max_loc.Y;
                    if (max_loc.X + size2x >= width) size2x = width - max_loc.X - 1;
                    if (max_loc.Y + size2y >= height) size2y = height - max_loc.Y - 1;
                    rect = new CvRect(max_loc.X - size2x, max_loc.Y - size2y, size, size);
                    CvMoments moments;
                    Cv.SetImageROI(img2, rect);
                    Cv.Moments(img2, out moments, false);
                    Cv.ResetImageROI(img2);
                    m00 = Cv.GetSpatialMoment(moments, 0, 0);
                    m10 = Cv.GetSpatialMoment(moments, 1, 0);
                    m01 = Cv.GetSpatialMoment(moments, 0, 1);
                    gx = max_loc.X - size2x + m10 / m00;
                    gy = max_loc.Y - size2y + m01 / m00;
                    */
                    #endregion

                    #region Position detection 2 // Blob
                    Cv.Threshold(imgdata.img, img2, threshold_blob, 255, ThresholdType.Binary); // 2 ms
                    blobs.Label(img2, imgLabel); // 1.4 ms
                    max_label = blobs.GreaterBlob();
                    elapsed1 = sw.ElapsedTicks; // 1.3 ms
                    if (blobs.Count > 1 && gx >= 0)
                    {
                        uint min_area = (uint)(threshold_min_area * blobs[max_label].Area);
                        blobs.FilterByArea(min_area, uint.MaxValue); // 0.001 ms

                        // choose the best blob (large area and small distance from the previous position)
                        double x = blobs[max_label].Centroid.X;
                        double y = blobs[max_label].Centroid.Y;
                        uint area = blobs[max_label].Area;
                        //CvRect rect;
                        distance_min = ((x - gx) * (x - gx) + (y - gy) * (y - gy)); //Math.Sqrt()
                        foreach (var item in blobs)
                        {
                            //Console.WriteLine("{0} | Centroid:{1} Area:{2}", item.Key, item.Value.Centroid, item.Value.Area);
                            x = item.Value.Centroid.X;
                            y = item.Value.Centroid.Y;
                            //rect = item.Value.Rect;
                            distance = ((x - gx) * (x - gx) + (y - gy) * (y - gy)); // eventually use the Mahalanobis distance
                            if (distance < distance_min)
                            {
                                d_val = (item.Value.Area) / max_area;
                                if (distance <= 25) // short distance (5 px)
                                {
                                    if (d_val >= 0.4) //&& d_val <= 1.2)
                                    {
                                        max_label = item.Key;
                                        distance_min = distance;
                                    }
                                }
                                else
                                {
                                    if (d_val >= 0.8 && d_val <= 1.5)
                                    {
                                        max_label = item.Key;
                                        distance_min = distance;
                                    }
                                }
                            }
                            //w.WriteLine("{0} {1} {2} {3} {4}", dis, dv, i, item.Key, item.Value.Area);
                        }
                        //gx = x; gy = y; max_val = area;
                    }

                    if (max_label > 0)
                    {
                        maxBlob = blobs[max_label];
                        max_centroid = maxBlob.Centroid;
                        gx = max_centroid.X;
                        gy = max_centroid.Y;
                        max_area = maxBlob.Area;
                        if (this.States == SAVE)
                        {
                            Pid_Data_Send();
                            timerSavePostTime.Stop();
                            timerSaveMainTime.Stop();
                            timerSaveMainTime.Start();
                        }
                    }
                    else
                    {
                        gx = gy = 0;
                        max_area = 0;
                    }
                    #endregion

                    // on-screen display
                    str = String.Format("ID:{0:D2} ", id) + dn.ToString("yyyyMMdd_HHmmss_fff")
                        + String.Format(" ({0,000:F2},{1,000:F2}) ({2,000:0},{3,000:0})({4,0:F1})", gx, gy, xoa, yoa, max_area);
                    if (imgdata.ImgSaveFlag)
                    {
                        str += " True";
                    }
                    img.PutText(str, new CvPoint(10, 20), font, new CvColor(0, 255, 100));
                    img.Circle(new CvPoint((int)gx, (int)gy), 10, new CvColor(255, 255, 100));
                    bw.ReportProgress(0, img);

                    // processing speed
                    elapsed0 = sw.ElapsedTicks - elapsed1; // ticks per frame
                    elapsed1 = sw.ElapsedTicks;
                    framerate0 = alfa_fr * framerate1 + (1 - alfa_fr) * (Stopwatch.Frequency / (double)elapsed0);
                    framerate1 = framerate0;
                    str = String.Format("fr time = {0}({1}){2:F1}", sw.Elapsed, id, framerate0); //," ", sw.ElapsedMilliseconds);
                    // display the current time on the label via an anonymous delegate
                    this.Invoke(new dlgSetString(ShowText), new object[] { textBox1, str });
                    //img.ToBitmap(bitmap);
                    //pictureBox1.Refresh();
                }
                Application.DoEvents();
                Thread.Sleep(interval);
            }

            this.States = STOP;
            this.Invoke(new dlgSetColor(SetColor), new object[] { ObsStart, this.States });
            this.Invoke(new dlgSetColor(SetColor), new object[] { ObsEndButton, this.States });
            vi.StopDevice(DeviceID);
            //udpc2.Close();
        }
    }
}
// Update is called once per frame
void Update()
{
    if (pp == null)
    {
        return;
    }
    if (!pp.AcquireFrame(false))
    {
        return;
    }
    //IplImage frame = Cv.QueryFrame(capture);

    if (rgbTexture != null)
    {
        Debug.Log("rgb not null");
        if (pp.QueryRGB(rgbTexture))
        {
            Debug.Log("query rgb done");
            //rgbTexture.Apply();
            Debug.Log("de pixelo: " + rgbTexture.GetPixels()[0]);
            IplImage frame = Texture2DtoIplImage(rgbTexture);

            using (IplImage img = Cv.CloneImage(frame))
            using (IplImage smallImg = new IplImage(new CvSize(Cv.Round(img.Width / Scale), Cv.Round(img.Height / Scale)), BitDepth.U8, 1))
            {
                using (IplImage gray = new IplImage(img.Size, BitDepth.U8, 1))
                {
                    Cv.CvtColor(img, gray, ColorConversion.BgrToGray);
                    Cv.Resize(gray, smallImg, Interpolation.Linear);
                    Cv.EqualizeHist(smallImg, smallImg);
                }

                using (CvMemStorage storage = new CvMemStorage())
                {
                    storage.Clear();
                    CvSeq<CvAvgComp> faces = Cv.HaarDetectObjects(smallImg, cascade, storage, ScaleFactor, MinNeighbors, 0, new CvSize(64, 64));
                    for (int i = 0; i < faces.Total; i++)
                    {
                        CvRect r = faces[i].Value.Rect;
                        CvPoint center = new CvPoint
                        {
                            X = Cv.Round((r.X + r.Width * 0.5) * Scale),
                            Y = Cv.Round((r.Y + r.Height * 0.5) * Scale)
                        };
                        int radius = Cv.Round((r.Width + r.Height) * 0.25 * Scale);
                        img.Circle(center, radius, colors[i % 8], 3, LineType.AntiAlias, 0);
                    }
                    if (faces.Total > 0)
                    {
                        CvRect r = faces[0].Value.Rect;
                        //facepos = new Vector2((r.X + r.Width / 2.0f) / CAPTURE_WIDTH, (r.Y + r.Height / 2.0f) / CAPTURE_HEIGHT);
                    }
                }
                Cv.ShowImage("FaceDetect", img);
            }
        } // end if QueryRGB succeeded
        else
        {
            Debug.Log("failoo");
        }
    } // end if rgbTexture != null
    else
    {
        Debug.Log("rgb NULL");
    }
    pp.ReleaseFrame();
}