/// <summary>
/// Interactive inpainting: the user paints a mask over <paramref name="src"/> with the
/// left mouse button, then presses ENTER to run cvInpaint (Navier-Stokes method).
/// Hot keys: 'r' = reset mask/image, ENTER = inpaint, ESC = close windows and return.
/// </summary>
/// <param name="src">Source image to repair (assumed 3-channel, 8-bit).</param>
/// <returns>The inpainted result (also stored in the <c>inpaint</c> field).</returns>
public IplImage InpaintImage(IplImage src)
{
    inpaint = new IplImage(src.Size, BitDepth.U8, 3);
    IplImage paint = src.Clone();
    IplImage mask = new IplImage(src.Size, BitDepth.U8, 1);
    CvWindow win_Paint = new CvWindow("Paint", WindowMode.AutoSize, paint);

    CvPoint prevPt = new CvPoint(-1, -1);
    win_Paint.OnMouseCallback += delegate(MouseEvent eve, int x, int y, MouseEvent flag)
    {
        if (eve == MouseEvent.LButtonDown)
        {
            prevPt = new CvPoint(x, y);
        }
        else if (eve == MouseEvent.LButtonUp || (flag & MouseEvent.FlagLButton) == 0)
        {
            prevPt = new CvPoint(-1, -1);
        }
        else if (eve == MouseEvent.MouseMove && (flag & MouseEvent.FlagLButton) != 0)
        {
            CvPoint pt = new CvPoint(x, y);
            // BUG FIX: if the drag started outside the window, prevPt is still
            // (-1,-1); start the stroke at the current point instead of drawing
            // a line from (-1,-1). Same guard as the sibling Inpaint sample.
            if (prevPt.X < 0)
            {
                prevPt = pt;
            }
            Cv.DrawLine(mask, prevPt, pt, CvColor.White, 5, LineType.AntiAlias, 0);
            Cv.DrawLine(paint, prevPt, pt, CvColor.White, 5, LineType.AntiAlias, 0);
            prevPt = pt;
            win_Paint.ShowImage(paint);
        }
    };

    bool repeat = true;
    while (repeat)
    {
        switch (CvWindow.WaitKey(0))
        {
            case 'r':       // restore the original image and clear the mask
                mask.SetZero();
                Cv.Copy(src, paint);
                win_Paint.ShowImage(paint);
                break;
            case '\r':      // ENTER: run the inpainting algorithm and show the result
                CvWindow win_Inpaint = new CvWindow("Inpainted", WindowMode.AutoSize);
                Cv.Inpaint(paint, mask, inpaint, 3, InpaintMethod.NS);
                win_Inpaint.ShowImage(inpaint);
                break;
            case (char)27:  // ESC: close all windows and return the result
                CvWindow.DestroyAllWindows();
                repeat = false;
                break;
        }
    }
    return (inpaint);
}
/// <summary>
/// Captures frames from camera 0, stamps a frame counter onto each frame,
/// writes them to "cap.avi" and previews them in a window until ESC is pressed
/// or the camera stops delivering frames.
/// </summary>
public VideoWriter()
{
    // (1) Create a capture structure for the camera
    using (CvCapture capture = CvCapture.FromCamera(0))
    {
        // (2) Get the capture size (depends on the camera in use)
        int width = capture.FrameWidth;
        int height = capture.FrameHeight;
        double fps = 15;//capture.Fps;
        // (3) Create the video writer (FourCC.Prompt asks the user for a codec)
        using (CvVideoWriter writer = new CvVideoWriter("cap.avi", FourCC.Prompt, fps, new CvSize(width, height)))
        using (CvFont font = new CvFont(FontFace.HersheyComplex, 0.7, 0.7))
        using (CvWindow window = new CvWindow("Capture", WindowMode.AutoSize))
        {
            // (4) Grab frames from the camera and write them out
            for (int frames = 0; ; frames++)
            {
                IplImage frame = capture.QueryFrame();
                // BUG FIX: QueryFrame returns null when the device stops
                // delivering frames; previously this crashed with an NRE.
                // (The frame is owned by the capture — do not dispose it.)
                if (frame == null)
                {
                    break;
                }
                string str = string.Format("{0}[frame]", frames);
                frame.PutText(str, new CvPoint(10, 20), font, new CvColor(0, 255, 100));
                writer.WriteFrame(frame);
                window.ShowImage(frame);
                int key = CvWindow.WaitKey((int)(1000 / fps));
                if (key == '\x1b')  // ESC
                {
                    break;
                }
            }
        }
    }
}
/// <summary>
/// Displays a single image in a temporary window and blocks until any key is pressed.
/// </summary>
/// <param name="image">The image to display.</param>
static void ShowImage(IplImage image)
{
    using (CvWindow viewer = new CvWindow("OpenCV Window"))
    {
        viewer.ShowImage(image);
        CvWindow.WaitKey(0);
    }
    //ShowImages(new IplImage[] { image });
}
/// <summary>
/// cvCalcHist sample: interactively adjust brightness and contrast of a
/// grayscale image via trackbars, showing the transformed image and its
/// histogram live.
/// </summary>
public Histogram()
{
    // cvCalcHist
    // Sample that lets you vary contrast and brightness freely
    const int histSize = 64;
    float[] range0 = { 0, 256 };
    float[][] ranges = { range0 };
    // Load the image
    using (IplImage srcImg = new IplImage(Const.ImageLenna, LoadMode.GrayScale))
    using (IplImage dstImg = srcImg.Clone())
    using (IplImage histImg = new IplImage(new CvSize(400, 400), BitDepth.U8, 1))
    using (CvHistogram hist = new CvHistogram(new int[] { histSize }, HistogramFormat.Array, ranges, true))
    {
        using (CvWindow windowImage = new CvWindow("image", WindowMode.AutoSize))
        using (CvWindow windowHist = new CvWindow("histogram", WindowMode.AutoSize))
        {
            // Handler invoked whenever either trackbar is moved.
            // The two trackbar references are captured by the closure and
            // assigned after the delegate is created.
            CvTrackbar ctBrightness = null;
            CvTrackbar ctContrast = null;
            CvTrackbarCallback callback = delegate(int pos)
            {
                int brightness = ctBrightness.Pos - 100;
                int contrast = ctContrast.Pos - 100;
                // Apply the lookup table built from the slider values
                byte[] lut = CalcLut(contrast, brightness);
                srcImg.LUT(dstImg, lut);
                // Draw the histogram of the adjusted image
                CalcHist(dstImg, hist);
                DrawHist(histImg, hist, histSize);
                // Show both in their windows
                windowImage.ShowImage(dstImg);
                windowHist.ShowImage(histImg);
                dstImg.Zero();
                histImg.Zero();
            };
            // Create the trackbars
            // (In OpenCV the trackbar position can be read through a pointer, but
            // in .NET the GC may move the pointer, so that approach was dropped.
            // Obtain the position some other way, e.g. via CvTrackbar.Pos.)
            ctBrightness = windowImage.CreateTrackbar("brightness", 100, 200, callback);
            ctContrast = windowImage.CreateTrackbar("contrast", 100, 200, callback);
            // Initial draw
            callback(0);
            // Wait for a key press
            Cv.WaitKey(0);
        }
    }
}
/// <summary>
/// cvConvexHull2 sample: repeatedly scatters random points on a canvas,
/// computes their convex hull, and draws points and hull until ESC is pressed.
/// </summary>
public ConvexHull()
{
    using (IplImage canvas = Cv.CreateImage(new CvSize(500, 500), BitDepth.U8, 3))
    using (CvWindow window = new CvWindow("hull"))
    {
        Random random = new Random();
        while (true)
        {
            // Between 1 and 100 random points in the central quarter of the canvas
            int pointCount = random.Next() % 100 + 1;
            CvPoint[] points = new CvPoint[pointCount];
            for (int i = 0; i < points.Length; i++)
            {
                int px = random.Next() % (canvas.Width / 2) + canvas.Width / 4;
                int py = random.Next() % (canvas.Height / 2) + canvas.Height / 4;
                points[i] = new CvPoint(px, py);
            }

            // Draw the points
            Cv.Zero(canvas);
            for (int i = 0; i < points.Length; i++)
            {
                Cv.Circle(canvas, points[i], 2, new CvColor(255, 0, 0), -1);
            }

            // Compute the hull
            CvPoint[] hull;
            Cv.ConvexHull2(points, out hull, ConvexHullOrientation.Clockwise);

            // Draw the hull as a closed polygon: start the first segment from
            // the last vertex so the polygon closes.
            CvPoint previous = hull[hull.Length - 1];
            foreach (CvPoint current in hull)
            {
                Cv.Line(canvas, previous, current, CvColor.Green);
                previous = current;
            }

            window.ShowImage(canvas);
            if (Cv.WaitKey(0) == 27) // 'ESC'
            {
                break;
            }
        }
    }
}
/// <summary>
/// Blob tracking demo: for each video frame, Otsu-threshold the grayscale
/// image, keep the 200 largest blobs, render them, and update/draw the track
/// history. Stops cleanly at end of video.
/// </summary>
private static void Track()
{
    using (var video = new CvCapture("data/bach.mp4"))
    using (CvWindow window = new CvWindow("render"))
    using (CvWindow windowTracks = new CvWindow("tracks"))
    {
        IplImage gray = null;
        IplImage binary = null;
        IplImage render = null;
        IplImage renderTracks = null;
        CvTracks tracks = new CvTracks();
        try
        {
            for (int i = 0; ; i++)
            {
                IplImage frame = video.QueryFrame();
                // BUG FIX: QueryFrame returns null at end of stream; previously
                // this caused a NullReferenceException on the CvtColor call below.
                if (frame == null)
                {
                    break;
                }
                // Lazily allocate working buffers once the frame size is known
                if (gray == null)
                {
                    gray = new IplImage(frame.Size, BitDepth.U8, 1);
                    binary = new IplImage(frame.Size, BitDepth.U8, 1);
                    render = new IplImage(frame.Size, BitDepth.U8, 3);
                    renderTracks = new IplImage(frame.Size, BitDepth.U8, 3);
                }
                render.Zero();
                renderTracks.Zero();

                Cv.CvtColor(frame, gray, ColorConversion.BgrToGray);
                Cv.Threshold(gray, binary, 0, 255, ThresholdType.Otsu);
                CvBlobs blobs = new CvBlobs(binary);
                // Keep only the 200 largest blobs (by area)
                CvBlobs newBlobs = new CvBlobs(blobs
                    .OrderByDescending(pair => pair.Value.Area)
                    .Take(200)
                    .ToDictionary(pair => pair.Key, pair => pair.Value), blobs.Labels);
                newBlobs.RenderBlobs(binary, render);
                window.ShowImage(render);

                newBlobs.UpdateTracks(tracks, 10.0, Int32.MaxValue);
                tracks.Render(binary, renderTracks);
                windowTracks.ShowImage(renderTracks);

                Cv.WaitKey(200);
                Console.WriteLine(i);
            }
        }
        finally
        {
            // FIX: working images were previously never disposed
            if (gray != null) gray.Dispose();
            if (binary != null) binary.Dispose();
            if (render != null) render.Dispose();
            if (renderTracks != null) renderTracks.Dispose();
        }
    }
}
/// <summary>
/// cvMoments sample: computes image moments up to 3rd order plus Hu moment
/// invariants for the Lenna image and draws the results as text on it.
/// </summary>
public Moments()
{
    // (1) Load the image. For a 3-channel image the COI (channel of interest)
    // must be set before computing moments.
    using (IplImage srcImg = new IplImage(Const.ImageLenna, LoadMode.AnyColor | LoadMode.AnyDepth))
    {
        if (srcImg.NChannels == 3 && srcImg.COI == 0)
        {
            srcImg.COI = 1;
        }
        // (2) Compute image moments up to the 3rd order
        CvMoments moments = new CvMoments(srcImg, false);
        srcImg.COI = 0;
        // (3) Derive moments and Hu moment invariants from the CvMoments values
        double spatialMoment = moments.GetSpatialMoment(0, 0);
        double centralMoment = moments.GetCentralMoment(0, 0);
        double normCMoment = moments.GetNormalizedCentralMoment(0, 0);
        CvHuMoments huMoments = new CvHuMoments(moments);
        // (4) Draw the computed values as text on the image
        using (CvFont font = new CvFont(FontFace.HersheySimplex, 1.0, 1.0, 0, 2, LineType.Link8))
        {
            string[] text = new string[10];
            text[0] = string.Format("spatial={0:F3}", spatialMoment);
            text[1] = string.Format("central={0:F3}", centralMoment);
            // BUG FIX: previously printed spatialMoment under the "norm=" label
            text[2] = string.Format("norm={0:F3}", normCMoment);
            text[3] = string.Format("hu1={0:F10}", huMoments.Hu1);
            text[4] = string.Format("hu2={0:F10}", huMoments.Hu2);
            text[5] = string.Format("hu3={0:F10}", huMoments.Hu3);
            text[6] = string.Format("hu4={0:F10}", huMoments.Hu4);
            text[7] = string.Format("hu5={0:F10}", huMoments.Hu5);
            text[8] = string.Format("hu6={0:F10}", huMoments.Hu6);
            text[9] = string.Format("hu7={0:F10}", huMoments.Hu7);
            CvSize textSize = font.GetTextSize(text[0]);
            for (int i = 0; i < 10; i++)
            {
                srcImg.PutText(text[i], new CvPoint(10, (textSize.Height + 3) * (i + 1)), font, CvColor.Black);
            }
        }
        // (5) Show the image with the results; exit on key press
        using (CvWindow window = new CvWindow("Image", WindowMode.AutoSize))
        {
            window.ShowImage(srcImg);
            Cv.WaitKey(0);
        }
    }
}
/// <summary>
/// Converts the input to grayscale, then lets the user adjust brightness and
/// contrast with trackbars while the resulting histogram is drawn live.
/// The final histogram image is cloned into the <c>DstHist</c> field and returned.
/// </summary>
/// <param name="src_tmp">Source image (assumed BGR; converted to grayscale here).</param>
/// <returns>A clone of the last histogram image drawn (also kept in <c>DstHist</c>).</returns>
public IplImage BuildHist(IplImage src_tmp)
{
    const int histSize = 64;
    float[] range0 = { 0, 256 };
    float[][] ranges = { range0 };
    // Read/prepare the images
    using (IplImage srcImg = new IplImage(src_tmp.Size, BitDepth.U8, 1))
    using (IplImage dstImg = new IplImage(src_tmp.Size, BitDepth.U8, 1))
    using (IplImage histImg = new IplImage(new CvSize(400, 400), BitDepth.U8, 1))
    using (CvHistogram hist = new CvHistogram(new int[] { histSize }, HistogramFormat.Array, ranges, true))
    {
        src_tmp.CvtColor(srcImg, ColorConversion.BgrToGray);
        srcImg.Copy(dstImg);
        using (CvWindow windowImage = new CvWindow("변환된 이미지", WindowMode.AutoSize))
        using (CvWindow windowHist = new CvWindow("히스토그램", WindowMode.AutoSize))
        {
            // Handler invoked whenever a trackbar is moved; the trackbar
            // references are captured by the closure and assigned below.
            CvTrackbar ctBrightness = null;
            CvTrackbar ctContrast = null;
            CvTrackbarCallback callback = delegate(int pos)
            {
                int brightness = ctBrightness.Pos - 100;
                int contrast = ctContrast.Pos - 100;
                // Apply the lookup table built from the slider values
                byte[] lut = CalcLut(contrast, brightness);
                srcImg.LUT(dstImg, lut);
                // Draw the histogram
                CalcHist(dstImg, hist);
                DrawHist(histImg, hist, histSize);
                // Show in the windows.
                // NOTE(review): DstHist is re-cloned on every callback without
                // disposing the previous clone — leaks one IplImage per slider
                // move; consider disposing the old value first.
                DstHist = histImg.Clone();
                windowImage.ShowImage(dstImg);
                windowHist.ShowImage(histImg);
                dstImg.Zero();
                histImg.Zero();
            };
            // Create the trackbars ("명도" = brightness, "대조" = contrast)
            ctBrightness = windowImage.CreateTrackbar("명도", 100, 200, callback);
            ctContrast = windowImage.CreateTrackbar("대조", 100, 200, callback);
            // Initial draw
            callback(0);
            // Wait for a key press
            Cv.WaitKey(0);
        }
        return (DstHist);
    }
}
/// <summary>
/// Loads the maple-leaf sample image, shows the original and its extracted
/// borders in two windows, waits for a key press, then releases the border image.
/// </summary>
static void LeafBorders()
{
    using (CvWindow sourceWin = new CvWindow("OpenCv Window2"))
    using (CvWindow bordersWin = new CvWindow("OpenCv Window"))
    {
        using (IplImage leaf = Cv.LoadImage("Resources/maple_leaf.jpg", LoadMode.Color))
        {
            //dst.FindContours()
            sourceWin.ShowImage(leaf);
            bordersWin.ShowImage(GetLeafBorders(leaf));
        }
        CvWindow.WaitKey();
        // Release the image produced by GetLeafBorders (still held by the window)
        bordersWin.Image.Dispose();
    }
}
/// <summary>
/// cvMoments sample: computes image moments up to 3rd order plus Hu moment
/// invariants for the Lenna image and draws the results as text on it.
/// </summary>
public Moments()
{
    // For a 3-channel image the COI (channel of interest) must be set
    using (IplImage srcImg = new IplImage(FilePath.Image.Lenna, LoadMode.AnyColor | LoadMode.AnyDepth))
    {
        if (srcImg.NChannels == 3 && srcImg.COI == 0)
        {
            srcImg.COI = 1;
        }
        CvMoments moments = new CvMoments(srcImg, false);
        srcImg.COI = 0;

        double spatialMoment = moments.GetSpatialMoment(0, 0);
        double centralMoment = moments.GetCentralMoment(0, 0);
        double normCMoment = moments.GetNormalizedCentralMoment(0, 0);
        CvHuMoments huMoments = new CvHuMoments(moments);

        // drawing
        using (CvFont font = new CvFont(FontFace.HersheySimplex, 1.0, 1.0, 0, 2, LineType.Link8))
        {
            string[] text = new string[10];
            text[0] = string.Format("spatial={0:F3}", spatialMoment);
            text[1] = string.Format("central={0:F3}", centralMoment);
            // BUG FIX: previously printed spatialMoment under the "norm=" label
            text[2] = string.Format("norm={0:F3}", normCMoment);
            text[3] = string.Format("hu1={0:F10}", huMoments.Hu1);
            text[4] = string.Format("hu2={0:F10}", huMoments.Hu2);
            text[5] = string.Format("hu3={0:F10}", huMoments.Hu3);
            text[6] = string.Format("hu4={0:F10}", huMoments.Hu4);
            text[7] = string.Format("hu5={0:F10}", huMoments.Hu5);
            text[8] = string.Format("hu6={0:F10}", huMoments.Hu6);
            text[9] = string.Format("hu7={0:F10}", huMoments.Hu7);
            CvSize textSize = font.GetTextSize(text[0]);
            for (int i = 0; i < 10; i++)
            {
                srcImg.PutText(text[i], new CvPoint(10, (textSize.Height + 3) * (i + 1)), font, CvColor.Black);
            }
        }

        using (var window = new CvWindow("Image", WindowMode.AutoSize))
        {
            window.ShowImage(srcImg);
            Cv.WaitKey(0);
        }
    }
}
/// <summary>
/// cvCalcHist sample: interactively adjust brightness and contrast of a
/// grayscale image with trackbars, redrawing the histogram live.
/// </summary>
public Histogram()
{
    // cvCalcHist
    const int histSize = 64;
    float[] grayRange = { 0, 256 };
    float[][] ranges = { grayRange };

    using (IplImage srcImg = new IplImage(FilePath.Image.Lenna, LoadMode.GrayScale))
    using (IplImage dstImg = srcImg.Clone())
    using (IplImage histImg = new IplImage(new CvSize(400, 400), BitDepth.U8, 1))
    using (CvHistogram hist = new CvHistogram(new int[] { histSize }, HistogramFormat.Array, ranges, true))
    {
        using (CvWindow windowImage = new CvWindow("image", WindowMode.AutoSize))
        using (CvWindow windowHist = new CvWindow("histogram", WindowMode.AutoSize))
        {
            // The trackbar references are captured by the closure below and
            // filled in once the trackbars exist.
            CvTrackbar brightnessBar = null;
            CvTrackbar contrastBar = null;
            CvTrackbarCallback onTrackbar = delegate(int pos)
            {
                int brightness = brightnessBar.Pos - 100;
                int contrast = contrastBar.Pos - 100;
                // perform LUT
                byte[] lut = CalcLut(contrast, brightness);
                srcImg.LUT(dstImg, lut);
                // draws histogram
                CalcHist(dstImg, hist);
                DrawHist(histImg, hist, histSize);
                windowImage.ShowImage(dstImg);
                windowHist.ShowImage(histImg);
                dstImg.Zero();
                histImg.Zero();
            };
            brightnessBar = windowImage.CreateTrackbar("brightness", 100, 200, onTrackbar);
            contrastBar = windowImage.CreateTrackbar("contrast", 100, 200, onTrackbar);
            // initial action
            onTrackbar(0);
            Cv.WaitKey(0);
        }
    }
}
/// <summary>
/// Demonstrates the Qt-backed highgui features: cvAddText, cvDisplayOverlay,
/// cvDisplayStatusBar, cvCreateButton and cvCreateTrackbar2. The Qt-only
/// calls are skipped when highgui was built without Qt support.
/// </summary>
public QtTest()
{
    using (CvWindow window = new CvWindow("window", WindowMode.ExpandedGui))
    using (IplImage img = new IplImage(Const.ImageLenna, LoadMode.Color))
    {
        if (CvWindow.HasQt)
        {
            // cvAddText: draw text with an arbitrary Qt font
            CvFont font = new CvFontQt("MS UI Gothic", 48, CvColor.Red, FontWeight.Bold, FontStyle.Italic);
            img.AddText("Hello Qt!!", new CvPoint(50, img.Height - 50), font);
            // cvDisplayOverlay, cvDisplayStatusBar (durations in milliseconds)
            window.DisplayOverlay("overlay text", 2000);
            window.DisplayStatusBar("statusbar text", 3000);
            // cvCreateButton: one shared callback, different userdata payloads
            CvButtonCallback buttonCallback = delegate(int state, object userdata)
            {
                Console.WriteLine("Button state:{0} userdata:{1} ({2})", state, userdata, userdata.GetType());
            };
            Cv.CreateButton("button1", buttonCallback, "my userstate", ButtonType.Checkbox, 0);
            Cv.CreateButton("button2", buttonCallback, 12345.6789, ButtonType.Checkbox, 0);
            // cvSaveWindowParameters
            //window.SaveWindowParameters();
        }

        window.ShowImage(img);

        // cvCreateTrackbar2: trackbar callback that also receives userdata
        CvTrackbarCallback2 trackbarCallback = delegate(int pos, object userdata)
        {
            Console.WriteLine("Trackbar pos:{0} userdata:{1} ({2})", pos, userdata, userdata.GetType());
        };
        window.CreateTrackbar2("trackbar1", 128, 256, trackbarCallback, "foobar");

        Cv.WaitKey();
    }
}
/// <summary>
/// Calibrates a camera lens from a set of chessboard photos (7x10 inner-corner
/// pattern, 24 mm squares). Finds the chessboard corners in each bitmap,
/// refines them to sub-pixel accuracy, runs cvCalibrateCamera2 for the
/// intrinsic/distortion parameters, then cvFindExtrinsicCameraParams2 on the
/// first view for the extrinsic parameters.
/// </summary>
/// <param name="bitmaps">Chessboard photographs; assumed BGR color.</param>
/// <param name="calibrationTakes">Number of takes to use; presumably equals bitmaps.Count — TODO confirm at call site.</param>
/// <returns>Calibration data; IsValid is true if more than half the boards were found.</returns>
public static CameraCalibrationData CalibrateLens(List<Bitmap> bitmaps, int calibrationTakes)
{
    int ImageNum = calibrationTakes;
    const int PatRow = 7;
    const int PatCol = 10;
    const int PatSize = PatRow * PatCol;
    int AllPoints = ImageNum * PatSize;
    const float ChessSize = 24.0f;

    // Convert bitmaps into Ipl Images
    IplImage[] srcImg = new IplImage[bitmaps.Count];
    for (int i = 0; i < bitmaps.Count; i++)
    {
        srcImg[i] = OpenCVUtil.IplImageFromBitmap(bitmaps[i]);
    }

    // Ideal 3D positions of every corner in every view (Z = 0 plane)
    CvPoint3D32f[, ,] objects = new CvPoint3D32f[ImageNum, PatRow, PatCol];
    for (int i = 0; i < ImageNum; i++)
    {
        for (int j = 0; j < PatRow; j++)
        {
            for (int k = 0; k < PatCol; k++)
            {
                objects[i, j, k] = new CvPoint3D32f
                {
                    X = j * ChessSize,
                    Y = k * ChessSize,
                    Z = 0.0f
                };
            }
        }
    }
    CvMat objectPoints = new CvMat(AllPoints, 3, MatrixType.F32C1, objects);

    CvSize patternSize = new CvSize(PatCol, PatRow);
    int foundNum = 0;
    List<CvPoint2D32f> allCorners = new List<CvPoint2D32f>(AllPoints);
    int[] pointCountsValue = new int[ImageNum];
    using (CvWindow window = new CvWindow("Calibration", WindowMode.AutoSize))
    {
        for (int i = 0; i < ImageNum; i++)
        {
            // Detect the chessboard corners in this view
            CvPoint2D32f[] corners;
            bool found = Cv.FindChessboardCorners(srcImg[i], patternSize, out corners);
            Debug.Print("{0:D2}...", i);
            if (found)
            {
                Debug.Print("ok");
                foundNum++;
            }
            else
            {
                Debug.Print("fail");
            }
            // NOTE(review): the corners are refined and collected even when
            // 'found' is false — verify this is intentional for partial boards.
            using (IplImage srcGray = new IplImage(srcImg[i].Size, BitDepth.U8, 1))
            {
                // Refine the corner locations to sub-pixel accuracy on the gray image
                Cv.CvtColor(srcImg[i], srcGray, ColorConversion.BgrToGray);
                Cv.FindCornerSubPix(srcGray, corners, corners.Length, new CvSize(3, 3), new CvSize(-1, -1), new CvTermCriteria(20, 0.03));
                Cv.DrawChessboardCorners(srcImg[i], patternSize, corners, found);
                pointCountsValue[i] = corners.Length;
                window.ShowImage(srcImg[i]);
                //Cv.WaitKey(0);
            }
            allCorners.AddRange(corners);
        }
        if (foundNum != ImageNum)
        {
            Debug.Assert(false);
        }
    }

    CvMat imagePoints = new CvMat(AllPoints, 1, MatrixType.F32C2, allCorners.ToArray());
    CvMat pointCounts = new CvMat(ImageNum, 1, MatrixType.S32C1, pointCountsValue);

    // Estimate intrinsic matrix and distortion coefficients over all views
    CvMat intrinsic = new CvMat(3, 3, MatrixType.F64C1);
    CvMat distortion = new CvMat(1, 4, MatrixType.F64C1);
    CvMat rotation = new CvMat(ImageNum, 3, MatrixType.F64C1);
    CvMat translation = new CvMat(ImageNum, 3, MatrixType.F64C1);
    Cv.CalibrateCamera2(objectPoints, imagePoints, pointCounts, srcImg[0].Size, intrinsic, distortion, rotation, translation, CalibrationFlag.Default);

    // Estimate the extrinsic parameters using only the first view's points
    CvMat subImagePoints, subObjectPoints;
    Cv.GetRows(imagePoints, out subImagePoints, 0, PatSize);
    Cv.GetRows(objectPoints, out subObjectPoints, 0, PatSize);
    CvMat rotation_ = new CvMat(1, 3, MatrixType.F32C1);
    CvMat translation_ = new CvMat(1, 3, MatrixType.F32C1);
    Cv.FindExtrinsicCameraParams2(subObjectPoints, subImagePoints, intrinsic, distortion, rotation_, translation_, false);
    //Cv.FindExtrinsicCameraParams2_(subObjectPoints, subImagePoints, intrinsic, distortion, rotation_, translation_, false);

    // Free IplImages
    foreach (IplImage img in srcImg)
    {
        img.Dispose();
    }

    // Construct and return camera calibration data
    CameraCalibrationData camCal = new CameraCalibrationData();
    camCal.IsValid = foundNum > (ImageNum / 2); // Consider valid if at least half were successes
    camCal.Intrinsic = intrinsic;
    camCal.Translation = translation_;
    camCal.Rotation = rotation_;
    camCal.Distortion = distortion;
    return (camCal);
}
// Update is called once per frame.
// Per-frame pipeline: grab a webcam frame, isolate the colored sphere by HSV
// thresholding + morphology, find its enclosing circle, and derive the 3D
// position (Sx, Sy, Sz) from the circle radius via the pinhole model.
// NOTE(review): the intermediate IplImages allocated here each frame are never
// disposed — confirm whether this leaks under the Mono GC.
void Update()
{
    // Grab one frame from the webcam
    _Frame = Cv.QueryFrame(_Capture);
    // Copy the webcam image to use as the input of STEP 1
    IplImage img = _Frame.Clone();
    // STEP 1 buffer
    IplImage smoothed = new IplImage(img.Size, BitDepth.U8, 3);
    // STEP 2 buffer
    IplImage hsv = new IplImage(img.Size, BitDepth.U8, 3);
    // STEP 3 buffers
    IplImage segmented = new IplImage(img.Size, BitDepth.U8, 1);
    CvMemStorage storage = new CvMemStorage();
    // STEP 4 buffers
    IplImage imgTmp = new IplImage(img.Size, BitDepth.U8, 1);
    IplImage morphology = new IplImage(img.Size, BitDepth.U8, 1);
    // STEP 5 buffers
    CvSeq<CvPoint> contours;
    IplImage detected = _Frame.Clone();

    // STEP 1: noise removal
    Cv.Smooth(img, smoothed, SmoothType.Blur, 1);
    //Cv.ShowImage("window",smoothed);
    //Cv.Smooth(smoothed, smoothed, SmoothType.Gaussian,1);

    // STEP 2: convert the colors from RGB to HSV
    Cv.CvtColor(smoothed, hsv, ColorConversion.BgrToHsv);
    //Cv.ShowImage("window",hsv);

    // STEP 3: segmentation — keep pixels within +-RANGE_* of the target HSV color
    storage.Clear();
    Cv.InRangeS(hsv,
                new CvScalar((pointhsv.Val0) - RANGE_H, (pointhsv.Val1) - RANGE_S, (pointhsv.Val2) - RANGE_V),
                new CvScalar((pointhsv.Val0) + RANGE_H, (pointhsv.Val1) + RANGE_S, (pointhsv.Val2) + RANGE_V),
                segmented);
    //Cv.ShowImage("window",segmented);

    // STEP 4: noise removal (morphological closing then opening)
    Cv.Dilate(segmented, imgTmp);
    Cv.Erode(imgTmp, imgTmp);
    Cv.Erode(imgTmp, imgTmp);
    Cv.Dilate(imgTmp, morphology);
    //Cv.ShowImage("window",morphology);

    // STEP 5: circle detection
    Cv.FindContours(morphology, storage, out contours, CvContour.SizeOf, ContourRetrieval.Tree, ContourChain.ApproxNone);
    if (contours == null)
    {
        Debug.Log("PSMove is not detected");
    }
    else
    {
        // Simplify the contour, draw it, and take its minimum enclosing circle
        contours = Cv.ApproxPoly(contours, CvContour.SizeOf, storage, ApproxPolyMethod.DP, Cv.ContourPerimeter(contours) * CTR_PARAM, true);
        Cv.DrawContours(morphology, contours, new CvScalar(MAX_G, 0, 0), new CvScalar(0, MAX_B, 0), 3, -1);
        Cv.MinEnclosingCircle(contours, out _Center, out _Radius);
        Cv.DrawCircle(morphology, _Center, 2, new CvScalar(0, MAX_B, 0));

        // STEP 6: derive 3D coordinates from the circle
        // (pinhole model: depth from apparent radius, then back-project X/Y;
        // fx/fy are focal lengths, ux/uy the principal point — assumed set elsewhere)
        Sz = fx * SPHERE_R / _Radius;
        Sx = -((_Center.X - ux) * Sz) / fx;
        Sy = -((_Center.Y - uy) * Sz) / fy;
    }

    // Output the stage images to their windows
    _Window.ShowImage(_Frame);
    Cv.ShowImage("Original", img);
    Cv.ShowImage("STEP1:Smoothing", smoothed);
    Cv.ShowImage("STEP2:HSV", hsv);
    Cv.ShowImage("STEP3:Segmentation", segmented);
    Cv.ShowImage("STEP4:Morphology", morphology);
    Cv.ShowImage("STEP5:Detected", detected);
}
/// <summary>
/// cvExtractSURF sample: detects SURF keypoints in a template object and a
/// scene image, matches them, outlines the located object in the scene, and
/// shows the keypoints and the correspondence image in two windows.
/// </summary>
public SURFSample()
{
    // call cv::initModule_nonfree() before using SURF/SIFT.
    Cv2.InitModule_NonFree();

    using (IplImage obj = Cv.LoadImage(Const.ImageSurfBox, LoadMode.GrayScale))
    using (IplImage image = Cv.LoadImage(Const.ImageSurfBoxinscene, LoadMode.GrayScale))
    using (IplImage objColor = Cv.CreateImage(obj.Size, BitDepth.U8, 3))
    using (IplImage correspond = Cv.CreateImage(new CvSize(image.Width, obj.Height + image.Height), BitDepth.U8, 1))
    {
        Cv.CvtColor(obj, objColor, ColorConversion.GrayToBgr);
        // Stack the object image above the scene image in 'correspond'
        Cv.SetImageROI(correspond, new CvRect(0, 0, obj.Width, obj.Height));
        Cv.Copy(obj, correspond);
        Cv.SetImageROI(correspond, new CvRect(0, obj.Height, correspond.Width, correspond.Height));
        Cv.Copy(image, correspond);
        Cv.ResetImageROI(correspond);

        // Extract SURF keypoints and descriptors from both images
        CvSURFPoint[] objectKeypoints, imageKeypoints;
        float[][] objectDescriptors, imageDescriptors;
        Stopwatch watch = Stopwatch.StartNew();
        {
            CvSURFParams param = new CvSURFParams(500, true);
            Cv.ExtractSURF(obj, null, out objectKeypoints, out objectDescriptors, param);
            Console.WriteLine("Object Descriptors: {0}", objectDescriptors.Length);
            Cv.ExtractSURF(image, null, out imageKeypoints, out imageDescriptors, param);
            Console.WriteLine("Image Descriptors: {0}", imageDescriptors.Length);
        }
        watch.Stop();
        Console.WriteLine("Extraction time = {0}ms", watch.ElapsedMilliseconds);
        watch.Reset();
        watch.Start();

        // Outline the located object in the scene half of the correspondence image
        CvPoint[] srcCorners = new CvPoint[4]
        {
            new CvPoint(0, 0), new CvPoint(obj.Width, 0), new CvPoint(obj.Width, obj.Height), new CvPoint(0, obj.Height)
        };
        CvPoint[] dstCorners = LocatePlanarObject(objectKeypoints, objectDescriptors, imageKeypoints, imageDescriptors, srcCorners);
        if (dstCorners != null)
        {
            for (int i = 0; i < 4; i++)
            {
                CvPoint r1 = dstCorners[i % 4];
                CvPoint r2 = dstCorners[(i + 1) % 4];
                Cv.Line(correspond, new CvPoint(r1.X, r1.Y + obj.Height), new CvPoint(r2.X, r2.Y + obj.Height), CvColor.White);
            }
        }

        // Draw a line between each pair of matched keypoints
        int[] ptPairs = FindPairs(objectKeypoints, objectDescriptors, imageKeypoints, imageDescriptors);
        for (int i = 0; i < ptPairs.Length; i += 2)
        {
            CvSURFPoint r1 = objectKeypoints[ptPairs[i]];
            CvSURFPoint r2 = imageKeypoints[ptPairs[i + 1]];
            Cv.Line(correspond, r1.Pt, new CvPoint(Cv.Round(r2.Pt.X), Cv.Round(r2.Pt.Y + obj.Height)), CvColor.White);
        }

        // Draw a circle at each object keypoint, radius proportional to its scale
        for (int i = 0; i < objectKeypoints.Length; i++)
        {
            CvSURFPoint r = objectKeypoints[i];
            CvPoint center = new CvPoint(Cv.Round(r.Pt.X), Cv.Round(r.Pt.Y));
            int radius = Cv.Round(r.Size * (1.2 / 9.0) * 2);
            Cv.Circle(objColor, center, radius, CvColor.Red, 1, LineType.AntiAlias, 0);
        }
        watch.Stop();
        Console.WriteLine("Drawing time = {0}ms", watch.ElapsedMilliseconds);

        // Show the results
        using (CvWindow windowObject = new CvWindow("Object", WindowMode.AutoSize))
        using (CvWindow windowCorrespond = new CvWindow("Object Correspond", WindowMode.AutoSize))
        {
            // BUG FIX: the two images were previously swapped — the "Object"
            // window showed the correspondence image and vice versa.
            windowObject.ShowImage(objColor);
            windowCorrespond.ShowImage(correspond);
            Cv.WaitKey(0);
        }
    }
}
/// <summary>
/// cvInpaint sample: the user paints a damage mask on the image with the left
/// mouse button, then runs the Telea inpainting algorithm. Hot keys:
/// ESC = quit, 'r' = restore, 'i'/ENTER = inpaint, 's' = save images to desktop.
/// </summary>
public Inpaint()
{
    // cvInpaint
    Console.WriteLine(
        "Hot keys: \n" +
        "\tESC - quit the program\n" +
        "\tr - restore the original image\n" +
        "\ti or ENTER - run inpainting algorithm\n" +
        "\t\t(before running it, paint something on the image)\n" +
        "\ts - save the original image, mask image, original+mask image and inpainted image to desktop."
        );

    using (IplImage img0 = new IplImage(FilePath.Image.Fruits, LoadMode.AnyDepth | LoadMode.AnyColor))
    {
        using (IplImage img = img0.Clone())
        using (IplImage inpaintMask = new IplImage(img0.Size, BitDepth.U8, 1))
        using (IplImage inpainted = img0.Clone())
        {
            inpainted.Zero();
            inpaintMask.Zero();

            using (CvWindow wImage = new CvWindow("image", WindowMode.AutoSize, img))
            {
                // Freehand drawing: while the left button is held, connect
                // successive mouse positions with thick white lines on both
                // the visible image and the inpainting mask.
                CvPoint prevPt = new CvPoint(-1, -1);
                wImage.OnMouseCallback += delegate(MouseEvent ev, int x, int y, MouseEvent flags)
                {
                    if (ev == MouseEvent.LButtonUp || (flags & MouseEvent.FlagLButton) == 0)
                    {
                        prevPt = new CvPoint(-1, -1);
                    }
                    else if (ev == MouseEvent.LButtonDown)
                    {
                        prevPt = new CvPoint(x, y);
                    }
                    else if (ev == MouseEvent.MouseMove && (flags & MouseEvent.FlagLButton) != 0)
                    {
                        CvPoint pt = new CvPoint(x, y);
                        // No stroke started yet (drag began outside the window):
                        // start it at the current point
                        if (prevPt.X < 0)
                        {
                            prevPt = pt;
                        }
                        inpaintMask.Line(prevPt, pt, CvColor.White, 5, LineType.AntiAlias, 0);
                        img.Line(prevPt, pt, CvColor.White, 5, LineType.AntiAlias, 0);
                        prevPt = pt;
                        wImage.ShowImage(img);
                    }
                };

                for (; ;)
                {
                    switch ((char)CvWindow.WaitKey(0))
                    {
                        case (char)27:    // exit
                            CvWindow.DestroyAllWindows();
                            return;
                        case 'r':   // restore original image
                            inpaintMask.Zero();
                            img0.Copy(img);
                            wImage.ShowImage(img);
                            break;
                        case 'i':   // do Inpaint
                        case '\r':
                            CvWindow wInpaint = new CvWindow("inpainted image", WindowMode.AutoSize);
                            img.Inpaint(inpaintMask, inpainted, 3, InpaintMethod.Telea);
                            wInpaint.ShowImage(inpainted);
                            break;
                        case 's': // save images
                            string desktop = Environment.GetFolderPath(Environment.SpecialFolder.Desktop);
                            img0.SaveImage(Path.Combine(desktop, "original.png"));
                            inpaintMask.SaveImage(Path.Combine(desktop, "mask.png"));
                            img.SaveImage(Path.Combine(desktop, "original+mask.png"));
                            inpainted.SaveImage(Path.Combine(desktop, "inpainted.png"));
                            break;
                    }
                }
            }
        }
    }
}
/// <summary>
/// Adaptive block-wise binarization: splits the grayscale image into 200x200
/// tiles, takes each tile's dominant histogram level, normalizes those levels
/// to 0..255 as per-tile thresholds, thresholds each tile and reassembles the
/// result into <c>bina</c>. Also opens several demo windows (lecture 37).
/// </summary>
/// <param name="src">Source image (converted to grayscale via this.GrayScale).</param>
/// <returns>The binarized image (also stored in the <c>bina</c> field).</returns>
public IplImage BinarizerMethod_Hist(IplImage src)
{
    bina = new IplImage(src.Size, BitDepth.U8, 1);
    gray = this.GrayScale(src);
    int area = 200;                 // tile edge length in pixels
    int num = 0;                    // running tile index
    // Number of tiles horizontally/vertically (round up for partial tiles)
    int row = (src.Width % area == 0) ? (int)(src.Width / area) : (int)(src.Width / area + 1);
    int col = (src.Height % area == 0) ? (int)(src.Height / area) : (int)(src.Height / area + 1);
    int count = row * col;
    float[] data = new float[count];
    IplImage[] piece = new IplImage[count];
    CvRect[] piece_roi = new CvRect[count];
    for (int x = 0; x < src.Width; x = x + area)
    {
        for (int y = 0; y < src.Height; y = y + area)
        {
            // Clamp the tile rectangle at the right/bottom image edges
            CvRect roi = new CvRect { X = x, Y = y, Width = area, Height = area };
            if (roi.X + roi.Width > src.Width)
            {
                roi.Width = area - ((roi.X + roi.Width) - src.Width);
            }
            if (roi.Y + roi.Height > src.Height)
            {
                roi.Height = area - ((roi.Y + roi.Height) - src.Height);
            }
            // Copy the tile out of the grayscale image
            gray.SetROI(roi);
            piece[num] = new IplImage(gray.ROI.Size, BitDepth.U8, 1);
            Cv.Copy(gray, piece[num]);
            gray.ResetROI();
            // Compute the tile's histogram and find its dominant bin
            int[] size = { area };
            CvHistogram hist = new CvHistogram(size, HistogramFormat.Array);
            Cv.CalcHist(piece[num], hist);
            float minValue, maxValue;
            hist.GetMinMaxValue(out minValue, out maxValue);
            int highlevel = 0;
            for (int i = 0; i < area; i++)
            {
                if (maxValue == hist.Bins[i].Val0)
                {
                    highlevel = i;
                }
            }
            piece_roi[num] = roi;
            data[num] = highlevel;
            num++;
        }
    }
    // Normalize the per-tile dominant levels into the 0..255 threshold range.
    // NOTE(review): tiles were filled x-outer/y-inner but the matrix is
    // declared (row=width tiles, col=height tiles) — the flat indexing below
    // is consistent, but the 2D shape is transposed; verify if reshaped later.
    CvMat kernel = new CvMat(row, col, MatrixType.F32C1, data);
    Cv.Normalize(kernel, kernel, 255, 0, NormType.C);
    for (int r = 0; r < count; r++)
    {
        // NOTE(review): ThresholdType.Otsu ignores the supplied threshold value
        Cv.Threshold(piece[r], piece[r], kernel[r], 255, ThresholdType.Otsu);
        // Paste the binarized tile back into place
        Cv.SetImageROI(bina, piece_roi[r]);
        Cv.Copy(piece[r], bina);
        bina.ResetROI();
    }
    // Lecture 37 - window demo
    CvWindow win = new CvWindow("window", WindowMode.StretchImage, src);
    win.Resize(640, 480);
    win.Move(100, 0);
    win.ShowImage(piece[0]);
    win.Close();
    new CvWindow(piece[0]).Move(0, 0);
    new CvWindow(piece[1]).Move(0, 200);
    new CvWindow(piece[2]).Move(0, 400);
    // Lecture 37 - window demo
    return (bina);
}
/// <summary>
/// cvCalibrateCamera2 sample: finds chessboard corners (7x10 inner corners,
/// 24 mm squares) in three calibration images, estimates the intrinsic matrix
/// and distortion coefficients, computes extrinsic parameters from the first
/// view, and writes the results to camera.xml.
/// </summary>
public CalibrateCamera()
{
    const int ImageNum = 3;
    const int PatRow = 7;
    const int PatCol = 10;
    const int PatSize = PatRow * PatCol;
    const int AllPoints = ImageNum * PatSize;
    const float ChessSize = 24.0f;

    IplImage[] srcImg = new IplImage[ImageNum];
    for (int i = 0; i < ImageNum; i++)
    {
        srcImg[i] = new IplImage(string.Format(FilePath.Image.Calibration, i), LoadMode.Color);
    }

    // Ideal 3D positions of every corner in every view (Z = 0 plane)
    CvPoint3D32f[,,] objects = new CvPoint3D32f[ImageNum, PatRow, PatCol];
    for (int i = 0; i < ImageNum; i++)
    {
        for (int j = 0; j < PatRow; j++)
        {
            for (int k = 0; k < PatCol; k++)
            {
                objects[i, j, k] = new CvPoint3D32f
                {
                    X = j * ChessSize,
                    Y = k * ChessSize,
                    Z = 0.0f
                };
            }
        }
    }
    CvMat objectPoints = new CvMat(AllPoints, 3, MatrixType.F32C1, objects);

    CvSize patternSize = new CvSize(PatCol, PatRow);
    int foundNum = 0;
    List<CvPoint2D32f> allCorners = new List<CvPoint2D32f>(AllPoints);
    int[] pointCountsValue = new int[ImageNum];
    using (CvWindow window = new CvWindow("Calibration", WindowMode.AutoSize))
    {
        for (int i = 0; i < ImageNum; i++)
        {
            // Detect the chessboard corners in this view
            CvPoint2D32f[] corners;
            bool found = Cv.FindChessboardCorners(srcImg[i], patternSize, out corners);
            Debug.Print("{0:D2}...", i);
            if (found)
            {
                Debug.Print("ok");
                foundNum++;
            }
            else
            {
                Debug.Print("fail");
            }
            using (IplImage srcGray = new IplImage(srcImg[i].Size, BitDepth.U8, 1))
            {
                // Refine corners to sub-pixel accuracy, draw them, and show the view
                Cv.CvtColor(srcImg[i], srcGray, ColorConversion.BgrToGray);
                Cv.FindCornerSubPix(srcGray, corners, corners.Length, new CvSize(3, 3), new CvSize(-1, -1), new CvTermCriteria(20, 0.03));
                Cv.DrawChessboardCorners(srcImg[i], patternSize, corners, found);
                pointCountsValue[i] = corners.Length;
                window.ShowImage(srcImg[i]);
                Cv.WaitKey(0);
            }
            allCorners.AddRange(corners);
        }
        if (foundNum != ImageNum)
        {
            Debug.Assert(false);
        }
    }

    CvMat imagePoints = new CvMat(AllPoints, 1, MatrixType.F32C2, allCorners.ToArray());
    CvMat pointCounts = new CvMat(ImageNum, 1, MatrixType.S32C1, pointCountsValue);

    // Estimate intrinsic matrix and distortion coefficients over all views
    CvMat intrinsic = new CvMat(3, 3, MatrixType.F64C1);
    CvMat distortion = new CvMat(1, 4, MatrixType.F64C1);
    CvMat rotation = new CvMat(ImageNum, 3, MatrixType.F64C1);
    CvMat translation = new CvMat(ImageNum, 3, MatrixType.F64C1);
    Cv.CalibrateCamera2(objectPoints, imagePoints, pointCounts, srcImg[0].Size, intrinsic, distortion, rotation, translation, CalibrationFlag.Default);

    // Estimate the extrinsic parameters using only the first view's points
    CvMat subImagePoints, subObjectPoints;
    Cv.GetRows(imagePoints, out subImagePoints, 0, PatSize);
    Cv.GetRows(objectPoints, out subObjectPoints, 0, PatSize);
    CvMat rotation_ = new CvMat(1, 3, MatrixType.F32C1);
    CvMat translation_ = new CvMat(1, 3, MatrixType.F32C1);
    Cv.FindExtrinsicCameraParams2(subObjectPoints, subImagePoints, intrinsic, distortion, rotation_, translation_, false);
    //Cv.FindExtrinsicCameraParams2_(subObjectPoints, subImagePoints, intrinsic, distortion, rotation_, translation_, false);

    // Persist the results as XML
    using (var fs = new CvFileStorage("camera.xml", null, OpenCvSharp.FileStorageMode.Write))
    {
        fs.Write("intrinsic", intrinsic);
        fs.Write("rotation", rotation_);
        fs.Write("translation", translation_);
        fs.Write("distortion", distortion);
    }

    foreach (IplImage img in srcImg)
    {
        img.Dispose();
    }

    // Display the written file
    Console.WriteLine(File.ReadAllText("camera.xml"));
    Console.Read();
}
/// <summary>
/// cvKalmanPredict / cvKalmanCorrect sample: tracks a point rotating around
/// the image center with a 2-state (angle, angular velocity) Kalman filter.
/// White cross = true state, red = noisy measurement, green = prediction.
/// Press any key to restart a simulation, ESC to quit.
/// </summary>
public unsafe Kalman()
{
    // cvKalmanPredict, cvKalmanCorrect
    // Track a rotating point using a Kalman filter

    // Transition matrix A: constant angular velocity model
    // [ phi'       ]   [1 1] [ phi       ]
    // [ delta_phi' ] = [0 1] [ delta_phi ]
    float[] A = new float[] { 1, 1, 0, 1 };

    using (IplImage img = new IplImage(500, 500, BitDepth.U8, 3))
    using (CvKalman kalman = new CvKalman(2, 1, 0))
    using (CvWindow window = new CvWindow("Kalman", WindowMode.AutoSize))
    {
        // state is (phi, delta_phi) - angle and angle increment
        CvMat state = new CvMat(2, 1, MatrixType.F32C1);
        CvMat process_noise = new CvMat(2, 1, MatrixType.F32C1);
        // only phi (angle) is measured
        CvMat measurement = new CvMat(1, 1, MatrixType.F32C1);
        measurement.SetZero();
        CvRandState rng = new CvRandState(0, 1, -1, DistributionType.Uniform);
        int code = -1;

        // Outer loop: one full simulation per iteration
        for (; ;)
        {
            Cv.RandSetRange(rng, 0, 0.1, 0);
            rng.DistType = DistributionType.Normal;
            // Initialize the filter matrices
            Marshal.Copy(A, 0, kalman.TransitionMatrix.Data, A.Length);
            kalman.MeasurementMatrix.SetIdentity(1);
            kalman.ProcessNoiseCov.SetIdentity(1e-5);
            kalman.MeasurementNoiseCov.SetIdentity(1e-1);
            kalman.ErrorCovPost.SetIdentity(1);
            // choose random initial state
            Cv.Rand(rng, kalman.StatePost);
            rng.DistType = DistributionType.Normal;

            // Inner loop: one predict/measure/correct step per iteration
            for (; ;)
            {
                float state_angle = state.DataSingle[0];
                CvPoint state_pt = CalcPoint(img, state_angle);

                // predict point position
                CvMat prediction = kalman.Predict(null);
                float predict_angle = prediction.DataSingle[0];
                CvPoint predict_pt = CalcPoint(img, predict_angle);

                // Simulate a noisy measurement of the angle
                Cv.RandSetRange(rng, 0, Math.Sqrt(kalman.MeasurementNoiseCov.DataSingle[0]), 0);
                Cv.Rand(rng, measurement);

                // generate measurement
                Cv.MatMulAdd(kalman.MeasurementMatrix, state, measurement, measurement);
                float measurement_angle = measurement.DataArraySingle[0];
                CvPoint measurement_pt = CalcPoint(img, measurement_angle);

                // Draw: white = true state, red = measurement, green = prediction
                img.SetZero();
                DrawCross(img, state_pt, CvColor.White, 3);
                DrawCross(img, measurement_pt, CvColor.Red, 3);
                DrawCross(img, predict_pt, CvColor.Green, 3);
                img.Line(state_pt, measurement_pt, new CvColor(255, 0, 0), 3, LineType.AntiAlias, 0);
                img.Line(state_pt, predict_pt, new CvColor(255, 255, 0), 3, LineType.AntiAlias, 0);

                // adjust Kalman filter state
                kalman.Correct(measurement);

                // Advance the true state with process noise
                Cv.RandSetRange(rng, 0, Math.Sqrt(kalman.ProcessNoiseCov.DataSingle[0]), 0);
                Cv.Rand(rng, process_noise);
                Cv.MatMulAdd(kalman.TransitionMatrix, state, process_noise, state);

                window.ShowImage(img);

                // break current simulation by pressing a key
                code = CvWindow.WaitKey(100);
                if (code > 0)
                {
                    break;
                }
            }
            // exit by ESCAPE
            if (code == 27)
            {
                break;
            }
        }
    }
}
/// <summary>
/// cvFindContours / cvDrawContours sample: draws six synthetic "faces" on a
/// binary image, extracts their contour tree, and lets a trackbar select which
/// nesting levels to draw.
/// </summary>
public FindContours()
{
    // cvFindContours, cvDrawContours
    // Detect contours in the image and draw those whose level is selected by the trackbar
    const int SIZE = 500;

    using (IplImage img = new IplImage(SIZE, SIZE, BitDepth.U8, 1))
    {
        // Initialize the image: draw six cartoon faces (the first gets "hair")
        img.Zero();
        for (int i = 0; i < 6; i++)
        {
            int dx = (i % 2) * 250 - 30;
            int dy = (i / 2) * 150;
            if (i == 0)
            {
                // Hair strands on the first face only
                for (int j = 0; j <= 10; j++)
                {
                    double angle = (j + 5) * Cv.PI / 21;
                    CvPoint p1 = new CvPoint(Cv.Round(dx + 100 + j * 10 - 80 * Math.Cos(angle)), Cv.Round(dy + 100 - 90 * Math.Sin(angle)));
                    CvPoint p2 = new CvPoint(Cv.Round(dx + 100 + j * 10 - 30 * Math.Cos(angle)), Cv.Round(dy + 100 - 30 * Math.Sin(angle)));
                    Cv.Line(img, p1, p2, CvColor.White, 1, LineType.AntiAlias, 0);
                }
            }
            // Face outline, eyes (nested white/black ellipses), nose, mouth, ears
            Cv.Ellipse(img, new CvPoint(dx + 150, dy + 100), new CvSize(100, 70), 0, 0, 360, CvColor.White, -1, LineType.AntiAlias, 0);
            Cv.Ellipse(img, new CvPoint(dx + 115, dy + 70), new CvSize(30, 20), 0, 0, 360, CvColor.Black, -1, LineType.AntiAlias, 0);
            Cv.Ellipse(img, new CvPoint(dx + 185, dy + 70), new CvSize(30, 20), 0, 0, 360, CvColor.Black, -1, LineType.AntiAlias, 0);
            Cv.Ellipse(img, new CvPoint(dx + 115, dy + 70), new CvSize(15, 15), 0, 0, 360, CvColor.White, -1, LineType.AntiAlias, 0);
            Cv.Ellipse(img, new CvPoint(dx + 185, dy + 70), new CvSize(15, 15), 0, 0, 360, CvColor.White, -1, LineType.AntiAlias, 0);
            Cv.Ellipse(img, new CvPoint(dx + 115, dy + 70), new CvSize(5, 5), 0, 0, 360, CvColor.Black, -1, LineType.AntiAlias, 0);
            Cv.Ellipse(img, new CvPoint(dx + 185, dy + 70), new CvSize(5, 5), 0, 0, 360, CvColor.Black, -1, LineType.AntiAlias, 0);
            Cv.Ellipse(img, new CvPoint(dx + 150, dy + 100), new CvSize(10, 5), 0, 0, 360, CvColor.Black, -1, LineType.AntiAlias, 0);
            Cv.Ellipse(img, new CvPoint(dx + 150, dy + 150), new CvSize(40, 10), 0, 0, 360, CvColor.Black, -1, LineType.AntiAlias, 0);
            Cv.Ellipse(img, new CvPoint(dx + 27, dy + 100), new CvSize(20, 35), 0, 0, 360, CvColor.White, -1, LineType.AntiAlias, 0);
            Cv.Ellipse(img, new CvPoint(dx + 273, dy + 100), new CvSize(20, 35), 0, 0, 360, CvColor.White, -1, LineType.AntiAlias, 0);
        }

        // Detect the contours (full tree) and simplify them
        CvSeq<CvPoint> contours;
        CvMemStorage storage = new CvMemStorage();
        // native style
        Cv.FindContours(img, storage, out contours, CvContour.SizeOf, ContourRetrieval.Tree, ContourChain.ApproxSimple);
        contours = Cv.ApproxPoly(contours, CvContour.SizeOf, storage, ApproxPolyMethod.DP, 3, true);
        // wrapper style
        //img.FindContours(storage, out contours, ContourRetrieval.Tree, ContourChain.ApproxSimple);
        //contours = contours.ApproxPoly(storage, ApproxPolyMethod.DP, 3, true);

        // Display: trackbar selects the contour level range (pos - 3) to draw
        using (CvWindow window_image = new CvWindow("image", img))
        using (CvWindow window_contours = new CvWindow("contours"))
        {
            CvTrackbarCallback onTrackbar = delegate(int pos)
            {
                IplImage cnt_img = new IplImage(SIZE, SIZE, BitDepth.U8, 3);
                CvSeq<CvPoint> _contours = contours;
                int levels = pos - 3;
                if (levels <= 0) // get to the nearest face to make it look more funny
                {
                    //_contours = _contours.HNext.HNext.HNext;
                }
                cnt_img.Zero();
                Cv.DrawContours(cnt_img, _contours, CvColor.Red, CvColor.Green, levels, 3, LineType.AntiAlias);
                window_contours.ShowImage(cnt_img);
                cnt_img.Dispose();
            };
            window_contours.CreateTrackbar("levels+3", 3, 7, onTrackbar);
            onTrackbar(3);

            Cv.WaitKey();
        }
    }
}