/// <summary>
/// Demonstrates FileStorage round-tripping: writes an int, a double and an
/// image to a YAML file, then reads them back and displays the image.
/// </summary>
private static void FileStorageTest()
{
    const string fileName = "foo.yml";

    // Write phase: serialize primitives and a Mat into YAML.
    using (var fs = new FileStorage(fileName, FileStorageMode.Write | FileStorageMode.FormatYaml))
    {
        fs.Write("int", 123);
        fs.Write("double", Math.PI);
        using (var tempMat = new Mat("data/lenna.png"))
        {
            fs.Write("mat", tempMat);
        }
    }

    // Read phase. ReadMat() returns a new caller-owned Mat, so it must be
    // disposed (the original version leaked it).
    using (var fs = new FileStorage(fileName, FileStorageMode.Read))
    {
        Console.WriteLine("int: {0}", fs["int"].ReadInt());
        Console.WriteLine("double: {0}", (double)fs["double"]);
        using (var mat = fs["mat"].ReadMat())
        using (var window = new Window("mat"))
        {
            window.ShowImage(mat);
            Cv2.WaitKey();
        }
    }
}
/// <summary>
/// Detects FAST keypoints in two images, computes BRIEF descriptors,
/// brute-force k-NN matches them, and shows the drawn matches.
/// </summary>
private static void BForceMatcherSample()
{
    // All OpenCV native objects are disposed deterministically; the
    // original version leaked every Mat, the detector, the extractor
    // and the matcher.
    using (var src1 = new Mat("data/match1.png"))
    using (var src2 = new Mat("data/match2.png"))
    using (var gray1 = new Mat())
    using (var gray2 = new Mat())
    using (var descriptors1 = new Mat())
    using (var descriptors2 = new Mat())
    using (var view = new Mat())
    {
        Cv2.CvtColor(src1, gray1, ColorConversion.BgrToGray);
        Cv2.CvtColor(src2, gray2, ColorConversion.BgrToGray);

        using (var fast = new FastFeatureDetector(10))
        using (var descriptorExtractor = new BriefDescriptorExtractor(32))
        {
            KeyPoint[] keypoints1 = fast.Run(gray1, null);
            descriptorExtractor.Compute(gray1, ref keypoints1, descriptors1);
            KeyPoint[] keypoints2 = fast.Run(gray2, null);
            descriptorExtractor.Compute(gray2, ref keypoints2, descriptors2);

            // Match descriptor vectors (k = 3 nearest neighbors).
            using (var bfMatcher = new BFMatcher(NormType.L2, false))
            {
                DMatch[][] bfMatches = bfMatcher.KnnMatch(descriptors1, descriptors2, 3, null, false);
                Cv2.DrawMatches(src1, keypoints1, src2, keypoints2, bfMatches, view);
            }
        }

        Window.ShowImages(view);
    }
}
/// <summary>
/// Plays a video file in a window with a seekable position trackbar.
/// </summary>
/// <exception cref="Exception">Thrown when the video cannot be opened.</exception>
private static void CaptureTest()
{
    // VideoCapture is a native resource; the original never disposed it.
    using (var capture = new VideoCapture(@"C:\a.wmv"))
    {
        if (!capture.IsOpened())
        {
            // Fixed the backtick typo ("can`t") in the original message.
            throw new Exception("Video can't be played");
        }

        // Delay per frame derived from the video's FPS.
        int sleepTime = (int)Math.Round(1000 / capture.Fps);

        using (var window = new Window("capture"))
        using (var image = new Mat()) // frame image buffer, reused each iteration
        {
            // Seeking the trackbar jumps playback to the chosen frame.
            var trackbar = window.CreateTrackbar2("Position", 0, capture.FrameCount,
                (int pos, object userdata) => { capture.PosFrames = pos; }, null);

            // When the movie playback reaches the end, Read returns false.
            while (capture.Read(image))
            {
                trackbar.Pos = capture.PosFrames;
                window.ShowImage(image);
                Cv2.WaitKey(sleepTime);
            }
        }
    }
}
/// <summary>
/// Grabs a single frame from the default camera (device 0) and shows it.
/// </summary>
private static void VideoCaptureSample()
{
    // Dispose the capture device and frame buffer (the original leaked
    // both and ended with a dead `frame.ToString()` statement).
    using (var cap = new VideoCapture(0))
    {
        if (!cap.IsOpened())
        {
            Console.WriteLine("Can't use camera.");
            return;
        }

        using (var frame = new Mat())
        {
            cap.Read(frame);
            Window.ShowImages(frame);
        }
    }
}
/// <summary>
/// Runs chamfer matching of a template's edges against an image's edges,
/// paints every candidate contour green and the best (first) one magenta,
/// then shows and saves the result.
/// </summary>
private static void ChamferMatchingSample()
{
    using (var img = new Mat("data/lenna.png", LoadMode.GrayScale))
    using (var templ = new Mat("data/lennas_eye.png", LoadMode.GrayScale))
    {
        Point[][] points;
        float[] cost;

        using (var imgEdge = img.Canny(50, 200))
        using (var templEdge = templ.Canny(50, 200))
        {
            imgEdge.SaveImage("e1.png");
            templEdge.SaveImage("e2.png");

            var ret = Cv2.ChamferMatching(imgEdge, templEdge, out points, out cost);
            Console.WriteLine(ret);
            // Length instead of LINQ Count() on an array.
            Console.WriteLine(points.Length);

            using (var img3 = img.CvtColor(ColorConversion.GrayToRgb))
            {
                // Paint all candidate matches green, printing each cost.
                var green = new Vec3b(0, 255, 0);
                for (int i = 0; i < points.Length; i++)
                {
                    foreach (var point in points[i])
                    {
                        img3.Set<Vec3b>(point.Y, point.X, green);
                    }
                    Console.WriteLine(cost[i]);
                }

                // Highlight the first candidate in magenta; guard against
                // an empty result (the original indexed points[0] blindly).
                if (points.Length > 0)
                {
                    var magenta = new Vec3b(255, 0, 255);
                    foreach (var point in points[0])
                    {
                        img3.Set<Vec3b>(point.Y, point.X, magenta);
                    }
                }

                Window.ShowImages(img3);
                img3.SaveImage("final.png");
            }
        }
    }
}
/// <summary>
/// Stitches the given images into a panorama, saves and displays it,
/// then disposes the input images.
/// </summary>
/// <param name="images">Source images; disposed by this method.</param>
private static void Stitching(Mat[] images)
{
    // Dispose the stitcher and the panorama Mat as well (the original
    // only disposed the input images).
    using (var stitcher = Stitcher.CreateDefault(false))
    using (var pano = new Mat())
    {
        Console.Write("Stitching 処理開始...");
        var status = stitcher.Stitch(images, pano);
        Console.WriteLine(" 完了 {0}", status);

        pano.SaveImage(@"C:\temp\pano.png");
        Window.ShowImages(pano);
    }

    foreach (Mat image in images)
    {
        image.Dispose();
    }
}
/// <summary>
/// Applies CLAHE (contrast-limited adaptive histogram equalization) with
/// several clip limits / tile grids and shows the results side by side.
/// </summary>
private static void Clahe()
{
    // Dispose every Mat; the original leaked src and all three outputs.
    using (var src = new Mat("data/tsukuba_left.png", LoadMode.GrayScale))
    using (var dst20 = new Mat())
    using (var dst40 = new Mat())
    using (var dst44 = new Mat())
    {
        using (CLAHE clahe = Cv2.CreateCLAHE())
        {
            clahe.ClipLimit = 20;
            clahe.Apply(src, dst20);

            clahe.ClipLimit = 40;
            clahe.Apply(src, dst40);

            // Same clip limit (40), finer 4x4 tile grid.
            clahe.TilesGridSize = new Size(4, 4);
            clahe.Apply(src, dst44);
        }

        Window.ShowImages(src, dst20, dst40, dst44);
    }
}
/// <summary>
/// Detects SURF keypoints/descriptors in two images, brute-force matches
/// them and displays the drawn matches.
/// </summary>
/// <param name="img1">First input image (copied into a Mat).</param>
/// <param name="img2">Second input image (copied into a Mat).</param>
private static void Surf(IplImage img1, IplImage img2)
{
    // Every native object is now disposed; the original leaked all of them.
    using (var src = new Mat(img1, true))
    using (var src2 = new Mat(img2, true))
    using (var descriptors1 = new MatOfFloat())
    using (var descriptors2 = new MatOfFloat())
    using (var view = new Mat())
    {
        // Detect the keypoints and generate their descriptors using SURF.
        using (var surf = new SURF(500, 4, 2, true))
        {
            KeyPoint[] keypoints1, keypoints2;
            surf.Run(src, null, out keypoints1, descriptors1);
            surf.Run(src2, null, out keypoints2, descriptors2);

            // Matching descriptor vectors with a brute force matcher.
            using (var matcher = new BFMatcher(NormType.L2, false))
            {
                // NOTE(review): original marked this call as the spot where
                // an exception occurs ("例外が発生する箇所").
                DMatch[] matches = matcher.Match(descriptors1, descriptors2);
                Cv2.DrawMatches(src, keypoints1, src2, keypoints2, matches, view);
            }
        }

        Window.ShowImages(view);
    }
}