/// <summary>
/// Converts both inputs to grayscale, detects SIFT keypoints/descriptors,
/// matches them with a brute-force matcher and displays the result.
/// </summary>
/// <param name="src1">First BGR input image (not modified).</param>
/// <param name="src2">Second BGR input image (not modified).</param>
private void MatchBySift(Mat src1, Mat src2)
{
    // FIX: every intermediate Mat holds native memory; the original never
    // disposed any of them. Wrap them all in using blocks.
    using (Mat gray1 = new Mat())
    using (Mat gray2 = new Mat())
    using (MatOfFloat descriptors1 = new MatOfFloat())
    using (MatOfFloat descriptors2 = new MatOfFloat())
    using (Mat view = new Mat())
    {
        Cv2.CvtColor(src1, gray1, ColorConversion.BgrToGray);
        Cv2.CvtColor(src2, gray2, ColorConversion.BgrToGray);

        SIFT sift = new SIFT();

        // Detect the keypoints and generate their descriptors using SIFT.
        KeyPoint[] keypoints1, keypoints2;
        sift.Run(gray1, null, out keypoints1, descriptors1);
        sift.Run(gray2, null, out keypoints2, descriptors2);

        // Matching descriptor vectors with a brute force matcher
        // (L2 norm, no cross-check — same settings as before).
        BFMatcher matcher = new BFMatcher(NormType.L2, false);
        DMatch[] matches = matcher.Match(descriptors1, descriptors2);

        // Draw matches and block until a key is pressed.
        Cv2.DrawMatches(gray1, keypoints1, gray2, keypoints2, matches, view);
        using (new Window("SIFT matching", WindowMode.AutoSize, view))
        {
            Cv2.WaitKey();
        }
    }
}
/// <summary>
/// Loads an image and computes up to 100 SIFT keypoints and their descriptors.
/// NOTE(review): the results are discarded, as in the original — presumably a
/// smoke test or benchmark stub; confirm with callers before extending.
/// </summary>
/// <param name="imPath">
/// Path of the image to read; defaults to the previously hard-coded location,
/// so existing parameterless calls behave identically.
/// </param>
public void GetData(string imPath = "D:\\PATHTOIMAGE\\Original.jpg")
{
    // FIX: Mat and MatOfFloat wrap native memory and were never disposed.
    using (var image = Cv2.ImRead(imPath))
    using (var descriptors = new MatOfFloat())
    {
        var sift = new SIFT(100);
        KeyPoint[] keypoints;
        sift.Run(image, null, out keypoints, descriptors);
    }
}
/// <summary>
/// Loads the image at <paramref name="path"/> in grayscale, resizes it so its
/// longest side equals the class-level maxSize (aspect ratio kept, other side
/// rounded up), computes SIFT descriptors, and writes one line per descriptor
/// row to <paramref name="sw"/> in the format "rowIndex\t0\tval val val ... ".
/// </summary>
/// <param name="path">Image file to process; silently skipped if unreadable.</param>
/// <param name="sw">Destination writer; one "\n"-terminated line per descriptor row.</param>
static void GenSIFT(string path, StreamWriter sw)
{
    using (Mat input = new Mat(path, LoadMode.GrayScale))
    {
        if (input.Empty())
        {
            return;
        }

        // Scale so the longer dimension becomes maxSize; round the other up.
        double h = input.Rows, w = input.Cols;
        double newh, neww;
        if (h > w)
        {
            newh = maxSize;
            neww = Math.Ceiling(w / h * maxSize);
        }
        else
        {
            neww = maxSize;
            newh = Math.Ceiling(h / w * maxSize);
        }
        Size newsize = new Size(neww, newh);

        using (Mat rinput = new Mat())
        {
            Cv2.Resize(input, rinput, newsize);
            using (MatOfFloat descriptors = new MatOfFloat())
            {
                // keypoint and sift are class-level members (shared with other overloads).
                sift.Run(rinput, null, out keypoint, descriptors);
                var indexer = descriptors.GetIndexer();
                System.Console.Out.WriteLine(descriptors.Rows);

                // FIX: build each line with a StringBuilder instead of the
                // original O(cols^2) string concatenation per row.
                var line = new System.Text.StringBuilder();
                for (int i = 0; i < descriptors.Rows; i++)
                {
                    line.Clear();
                    line.Append(i.ToString()).Append("\t0\t");
                    for (int j = 0; j < descriptors.Cols; j++)
                    {
                        line.Append(indexer[i, j].ToString()).Append(" ");
                    }
                    sw.Write(line.ToString() + "\n");
                }
                // FIX: removed redundant input/rinput/descriptors.Release() calls —
                // the enclosing using blocks already dispose each Mat.
            }
        }
    }
}
/// <summary>
/// Runs SIFT on grayscale versions of both inputs and shows, side by side,
/// the matches found by a brute-force matcher and by a FLANN-based matcher.
/// </summary>
/// <param name="src1">First BGR input image (not modified).</param>
/// <param name="src2">Second BGR input image (not modified).</param>
private void MatchBySift(Mat src1, Mat src2)
{
    // FIX: all intermediate Mats hold native memory; the original leaked them.
    using (var gray1 = new Mat())
    using (var gray2 = new Mat())
    using (var descriptors1 = new MatOfFloat())
    using (var descriptors2 = new MatOfFloat())
    using (var bfView = new Mat())
    using (var flannView = new Mat())
    {
        Cv2.CvtColor(src1, gray1, ColorConversion.BgrToGray);
        Cv2.CvtColor(src2, gray2, ColorConversion.BgrToGray);

        var sift = new SIFT();

        // Detect the keypoints and generate their descriptors using SIFT.
        KeyPoint[] keypoints1, keypoints2;
        sift.Run(gray1, null, out keypoints1, descriptors1);
        sift.Run(gray2, null, out keypoints2, descriptors2);

        // Match descriptor vectors with both matchers for comparison.
        var bfMatcher = new BFMatcher(NormType.L2, false);
        var flannMatcher = new FlannBasedMatcher();
        DMatch[] bfMatches = bfMatcher.Match(descriptors1, descriptors2);
        DMatch[] flannMatches = flannMatcher.Match(descriptors1, descriptors2);

        // Draw matches.
        Cv2.DrawMatches(gray1, keypoints1, gray2, keypoints2, bfMatches, bfView);
        Cv2.DrawMatches(gray1, keypoints1, gray2, keypoints2, flannMatches, flannView);

        // FIX: window-title typo "BFMather" -> "BFMatcher".
        using (new Window("SIFT matching (by BFMatcher)", WindowMode.AutoSize, bfView))
        using (new Window("SIFT matching (by FlannBasedMatcher)", WindowMode.AutoSize, flannView))
        {
            Cv2.WaitKey();
        }
    }
}
/// <summary>
/// Entry point: loads the reference logo image, extracts up to 1000 SIFT
/// features from it, and hands them to the camera-matching loop.
/// </summary>
static void Main(string[] args)
{
    SIFT sift = new SIFT(1000);
    KeyPoint[] keiKeyPoints1 = { };
    MatOfFloat desc1 = new MatOfFloat();

    Mat logo_img = Cv2.ImRead(
        @"C:\Users\Laptop\Documents\Projektai\Vaizdo-Atpazinimas\SIFTSURF\SIFTSURF\arduino2.png",
        LoadMode.GrayScale);

    // FIX: ImRead returns an *empty* Mat (not null) when the file is missing or
    // unreadable; without this guard sift.Run would fail later with a cryptic error.
    if (logo_img.Empty())
    {
        Console.Error.WriteLine("Could not load the logo image.");
        return;
    }

    // NOTE(review): the window is created for its display side effect; it is not
    // passed to cameraWork — confirm it is still needed.
    Window videoWindow = new Window("Logo", WindowMode.FreeRatio);

    sift.Run(logo_img, null, out keiKeyPoints1, desc1);

    BFMatcher matcher = new BFMatcher(NormType.L2);
    cameraWork(matcher, desc1, keiKeyPoints1, logo_img, sift);
}
/// <summary>
/// Matches SIFT features of a single aerial photo against the main map image and
/// returns a Result holding all matches, the filtered "good" matches, and the
/// images annotated with matches and keypoint markers.
/// </summary>
/// <param name="mainMap">The reference map photo (query side of the matcher).</param>
/// <param name="image">The single photo matched against the map (train side).</param>
/// <returns>The populated Result, including the drawn match and keypoint images.</returns>
public static Result Run(AirPhoto mainMap, AirPhoto image) {
    SIFT sift = new SIFT();
    BFMatcher matcher = new BFMatcher();
    Result res = new Result(mainMap, image);

    // Load both images from disk (file paths come via the AirPhoto wrappers).
    Mat map = new Mat(res.MainMap.FileName);
    Mat singleImage = new Mat(res.SingleImage.FileName);

    // Detect keypoints and compute descriptors for both images.
    // NOTE(review): `keyPoints` (written here) vs `Keypoints` (read below) differ
    // only in casing — presumably a field and a property over it; confirm in AirPhoto.
    sift.Run(map, null, out res.MainMap.keyPoints, res.MainMap.Descriptors);
    sift.Run(singleImage, null, out res.SingleImage.keyPoints, res.SingleImage.Descriptors);

    // Brute-force match: one DMatch per descriptor row of the map image.
    res.Matches = matcher.Match(res.MainMap.Descriptors, res.SingleImage.Descriptors);

    // Track the global minimum and maximum match distance across all matches.
    for (int i = 0; i < res.MainMap.Descriptors.Rows; i++) {
        double dist = res.Matches[i].Distance;
        if (dist < res.Min_dist) {
            res.Min_dist = dist;
        }
        if (dist > res.Max_dist) {
            res.Max_dist = dist;
        }
    }

    // Keep only "good" matches: distance within 2x the minimum, with a 0.25 floor.
    for (int i = 0; i < res.MainMap.Descriptors.Rows; i++) {
        if (res.Matches[i].Distance <= Math.Max(2 * res.Min_dist, 0.25)) {
            res.Good_matchesRAW.Add(res.Matches[i]);
            res.Current_good_matches.Add(res.Matches[i]);
        }
    }

    // Collect the pixel coordinates behind each good match, on both images.
    // The same points seed both the RAW and the "current" working lists.
    for (int i = 0; i < res.Good_matchesRAW.Count; i++) {
        res.MainMap.Keypoints_goodRAW.Add(res.MainMap.Keypoints[res.Good_matchesRAW[i].QueryIdx].Pt);
        res.SingleImage.Keypoints_goodRAW.Add(res.SingleImage.Keypoints[res.Good_matchesRAW[i].TrainIdx].Pt);
        res.MainMap.Current_good_keypoints.Add(res.MainMap.Keypoints[res.Good_matchesRAW[i].QueryIdx].Pt);
        res.SingleImage.Current_good_keypoints.Add(res.SingleImage.Keypoints[res.Good_matchesRAW[i].TrainIdx].Pt);
    }

    // Draw the good matches side by side, and mark each good keypoint with a red
    // circle on fresh copies of the source images (loaded again from disk so the
    // originals used for matching stay untouched).
    Mat view = new Mat();
    Mat map_draw = new Mat(res.MainMap.FileName);
    Mat singleImage_draw = new Mat(res.SingleImage.FileName);
    Cv2.DrawMatches(map, res.MainMap.Keypoints, singleImage, res.SingleImage.Keypoints, res.Current_good_matches, view);
    for (int i = 0; i < res.MainMap.Current_good_keypoints.Count; i++) {
        // Circle radius scales with image width so markers stay visible at any resolution.
        Cv.DrawCircle((IplImage)map_draw, new CvPoint(Convert.ToInt32(res.MainMap.Current_good_keypoints[i].X), Convert.ToInt32(res.MainMap.Current_good_keypoints[i].Y)), map_draw.Width / 500, CvColor.Red, 2);
    }
    for (int i = 0; i < res.SingleImage.Current_good_keypoints.Count; i++) {
        Cv.DrawCircle((IplImage)singleImage_draw, new CvPoint(Convert.ToInt32(res.SingleImage.Current_good_keypoints[i].X), Convert.ToInt32(res.SingleImage.Current_good_keypoints[i].Y)), singleImage_draw.Width / 100, CvColor.Red, 2);
    }

    // Hand the annotated images and the raw match visualization back via the result.
    res.MainMap.RAWwithKP = map_draw;
    res.SingleImage.RAWwithKP = singleImage_draw;
    res.RAWmatches = view;
    return(res);
}
/// <summary>
/// Feature extraction and matching on the given 2 images.
/// The result is the images concatenated together with features,
/// matching and homography drawn on it.
/// </summary>
/// <param name="image1">First (reference) image.</param>
/// <param name="image2">Second image, searched for the first one's features.</param>
/// <param name="featureType">Which feature extractor to use (SIFT or SURF).</param>
/// <returns>The match visualization converted back to an IplImage.</returns>
/// <exception cref="NotSupportedException">Unknown feature or matcher type.</exception>
private IplImage Matching(IplImage image1, IplImage image2, FeatureType featureType)
{
    Mat src1 = new Mat(image1);
    Mat src2 = new Mat(image2);

    KeyPoint[] keypoints1;
    KeyPoint[] keypoints2;
    MatOfFloat descriptors1 = new MatOfFloat();
    MatOfFloat descriptors2 = new MatOfFloat();

    // Extract features with the selected feature-extraction method.
    switch (featureType)
    {
        case FeatureType.Sift:
            sift.Run(src1, null, out keypoints1, descriptors1);
            sift.Run(src2, null, out keypoints2, descriptors2);
            break;
        case FeatureType.Surf:
            surf.Run(src1, null, out keypoints1, descriptors1);
            surf.Run(src2, null, out keypoints2, descriptors2);
            break;
        default:
            throw new NotSupportedException("Sorry, missing feature type.");
    }

    // Match descriptor vectors with the configured matcher.
    DMatch[] matches;
    switch (FeatureMatcher)
    {
        case MatcherType.BruteForce:
            matches = bruteForceMatcher.Match(descriptors1, descriptors2);
            break;
        case MatcherType.FlannBased:
            matches = flannBasedMatcher.Match(descriptors1, descriptors2);
            break;
        default:
            throw new NotSupportedException("Sorry, missing matcher type.");
    }

    // Optionally keep only "good" matches: distance within
    // GoodMatchingThreshold times the minimum observed distance.
    List<DMatch> goodMatches;
    if (IsGoodMatching)
    {
        // FIX: removed the dead maxDistance bookkeeping (it was computed but
        // never used). The 100 cap on the minimum is kept to preserve the
        // original thresholding behavior (it comes from the classic OpenCV sample).
        double minDistance = Math.Min(matches.Min(m => m.Distance), 100);
        goodMatches = matches.Where(m => m.Distance <= GoodMatchingThreshold * minDistance).ToList();
    }
    else
    {
        goodMatches = matches.ToList();
    }

    // Draw matches side by side.
    Mat view = new Mat();
    Cv2.DrawMatches(src1, keypoints1, src2, keypoints2, goodMatches, view);

    // Homography needs at least 4 point correspondences.
    // FIX: the old check used `> 4`, silently skipping the valid 4-match case
    // even though the comment (and OpenCV's findHomography) require only 4.
    if (IsHomography && goodMatches.Count >= 4)
    {
        // Localize the object: collect the matched point pairs.
        List<Point2d> goodKeypoints1 = new List<Point2d>();
        List<Point2d> goodKeypoints2 = new List<Point2d>();
        Point2f pt;
        for (int i = 0; i < goodMatches.Count; i++)
        {
            pt = keypoints1[goodMatches[i].QueryIdx].Pt;
            goodKeypoints1.Add(new Point2d(pt.X, pt.Y));
            pt = keypoints2[goodMatches[i].TrainIdx].Pt;
            goodKeypoints2.Add(new Point2d(pt.X, pt.Y));
        }

        // Find the homography mapping image2 points onto image1 coordinates.
        Mat homography = Cv2.FindHomography(goodKeypoints2, goodKeypoints1, HomographyMethod.Ransac);

        // Project image1's corners through the homography.
        InputArray corners1 = InputArray.Create(new Point2f[]
        {
            new Point2f(0, 0),
            new Point2f(src1.Cols, 0),
            new Point2f(src1.Cols, src1.Rows),
            new Point2f(0, src1.Rows),
        }.ToList());
        OutputArray corners2 = OutputArray.Create(new Point2f[]
        {
            new Point2f(0, 0),
            new Point2f(0, 0),
            new Point2f(0, 0),
            new Point2f(0, 0),
        }.ToList());
        InputArray perspectiveMatrix = InputArray.Create(homography);
        Cv2.PerspectiveTransform(corners1, corners2, perspectiveMatrix);

        Mat corners2Matrix = corners2.GetMat();
        Point2f point1 = corners2Matrix.At<Point2f>(0, 0);
        Point2f point2 = corners2Matrix.At<Point2f>(1, 0);
        Point2f point3 = corners2Matrix.At<Point2f>(2, 0);
        Point2f point4 = corners2Matrix.At<Point2f>(3, 0);

        // Draw the projected quadrilateral between the corners.
        Scalar color = new Scalar(0, 200, 253);
        Cv2.Line(view, point1, point2, color, 4);
        Cv2.Line(view, point2, point3, color, 4);
        Cv2.Line(view, point3, point4, color, 4);
        Cv2.Line(view, point4, point1, color, 4);
    }

    IplImage result = view.ToIplImage();
    return result;
}
/// <summary>
/// Loads the image at <paramref name="path"/> in grayscale, resizes it so its
/// longest side equals the class-level maxSize (aspect kept), computes SIFT
/// descriptors, and appends one binary record per descriptor row to the
/// class-level writer bw: a UInt64 key parsed from the first 16 hex digits of
/// <paramref name="filename"/> followed by the descriptor bytes.
/// </summary>
/// <param name="path">Image file to process; failures are logged and skipped.</param>
/// <param name="filename">Name whose first 16 characters are a hex record key.</param>
static void GenSIFT(string path, string filename)
{
    try
    {
        using (Mat input = new Mat(path, LoadMode.GrayScale))
        {
            if (input.Empty())
            {
                return;
            }

            // Scale so the longer dimension becomes maxSize; round the other up.
            double h = input.Rows, w = input.Cols;
            double newh, neww;
            if (h > w)
            {
                newh = maxSize;
                neww = Math.Ceiling(w / h * maxSize);
            }
            else
            {
                neww = maxSize;
                newh = Math.Ceiling(h / w * maxSize);
            }
            Size newsize = new Size(neww, newh);

            using (Mat rinput = new Mat())
            {
                Cv2.Resize(input, rinput, newsize);
                using (MatOfFloat descriptors = new MatOfFloat())
                {
                    // keypoint, sift, cnt, tot and bw are class-level members.
                    sift.Run(rinput, null, out keypoint, descriptors);
                    cnt += 1;
                    Console.Out.WriteLine(descriptors.Rows + " " + cnt);
                    var indexer = descriptors.GetIndexer();

                    // FIX: parse the 16-hex-digit key with Convert.ToUInt64
                    // instead of the hand-rolled loop; this also accepts
                    // upper-case hex digits, which the old loop mis-decoded.
                    UInt64 v = Convert.ToUInt64(filename.Substring(0, 16), 16);

                    // One record per descriptor row: key, then the row bytes
                    // (floats truncated to bytes, matching the original output).
                    for (int i = 0; i < descriptors.Rows; i++)
                    {
                        bw.Write(v);
                        for (int j = 0; j < descriptors.Cols; j++)
                        {
                            bw.Write((Byte)indexer[i, j]);
                        }
                    }
                    tot += descriptors.Rows;
                    // FIX: removed redundant Release() calls — the using
                    // blocks already dispose each Mat.
                }
            }
        }
    }
    catch (Exception ex)
    {
        // FIX: the old bare `catch { return; }` silently swallowed every
        // failure. Keep the best-effort skip semantics, but report the reason.
        Console.Error.WriteLine("GenSIFT failed for '" + path + "': " + ex.Message);
        return;
    }
}