/// <summary>
/// Match the image features from the observed image to the features from the model image
/// using a brute-force matcher with L2 (float) distance.
/// </summary>
/// <param name="observedFeatures">The image features from the observed image</param>
/// <param name="k">The number of nearest neighbors to find for each observed feature</param>
/// <returns>The matched features, one entry per observed feature</returns>
public MatchedImageFeature[] MatchFeature(ImageFeature[] observedFeatures, int k)
{
   VectorOfKeyPoint obsKpts = null;
   Matrix<float> obsDscpts = null;
   try
   {
      // Convert the managed features into the native key point / descriptor containers.
      ConvertFromImageFeature(observedFeatures, out obsKpts, out obsDscpts);

      using (BruteForceMatcher matcher = new BruteForceMatcher(BruteForceMatcher.DistanceType.L2F32))
      using (Matrix<int> indices = new Matrix<int>(obsKpts.Size, k))
      using (Matrix<float> dists = new Matrix<float>(indices.Size))
      {
         matcher.Add(_modelDescriptors);
         matcher.KnnMatch(obsDscpts, indices, dists, k, null);

         MatchedImageFeature[] result = new MatchedImageFeature[observedFeatures.Length];
         for (int i = 0; i < observedFeatures.Length; i++)
         {
            result[i].SimilarFeatures = new SimilarFeature[k];
            for (int j = 0; j < k; j++)
            {
               result[i].SimilarFeatures[j].Distance = dists.Data[i, j];
               result[i].SimilarFeatures[j].Feature = _modelFeatures[indices.Data[i, j]];
            }
            result[i].ObservedFeature = observedFeatures[i];
         }
         return result;
      }
   }
   finally
   {
      // Dispose the converted containers even if matching throws
      // (the original only disposed them on the success path).
      if (obsKpts != null)
         obsKpts.Dispose();
      if (obsDscpts != null)
         obsDscpts.Dispose();
   }
}
/// <summary>
/// Recover the homography matrix using RANSAC. If the matrix cannot be recovered, null is returned.
/// </summary>
/// <param name="matchedFeatures">The matched features; only the first model feature of each is considered</param>
/// <returns>The homography matrix, or null if it cannot be found</returns>
public static HomographyMatrix GetHomographyMatrixFromMatchedFeatures(MatchedImageFeature[] matchedFeatures)
{
   if (matchedFeatures.Length < 4)
      return null;

   HomographyMatrix candidate;
   if (matchedFeatures.Length < _randsacRequiredMatch)
   {
      // Too few correspondences for RANSAC: compute an exact perspective
      // transform from the first 4 point pairs instead.
      PointF[] modelPts = new PointF[4];
      PointF[] observedPts = new PointF[4];
      for (int idx = 0; idx < 4; idx++)
      {
         modelPts[idx] = matchedFeatures[idx].SimilarFeatures[0].Feature.KeyPoint.Point;
         observedPts[idx] = matchedFeatures[idx].ObservedFeature.KeyPoint.Point;
      }
      candidate = CameraCalibration.GetPerspectiveTransform(modelPts, observedPts);
   }
   else
   {
      // Enough correspondences: let RANSAC reject outliers while fitting.
      int total = matchedFeatures.Length;
      PointF[] modelPts = new PointF[total];
      PointF[] observedPts = new PointF[total];
      for (int idx = 0; idx < total; idx++)
      {
         modelPts[idx] = matchedFeatures[idx].SimilarFeatures[0].Feature.KeyPoint.Point;
         observedPts[idx] = matchedFeatures[idx].ObservedFeature.KeyPoint.Point;
      }
      candidate = CameraCalibration.FindHomography(
         modelPts,    // points on the model image
         observedPts, // points on the observed image
         CvEnum.HOMOGRAPHY_METHOD.RANSAC,
         3);
      if (candidate == null)
         return null;
   }

   // Reject degenerate homographies; dispose the rejected matrix.
   if (!candidate.IsValid(10))
   {
      candidate.Dispose();
      return null;
   }
   return candidate;
}
/// <summary>
/// Match the image features from the observed image to the features from the model image
/// using the model's k-d tree index.
/// </summary>
/// <param name="observedFeatures">The image features from the observed image</param>
/// <param name="k">The number of neighbors to find</param>
/// <param name="emax">For k-d tree only: the maximum number of leaves to visit.</param>
/// <returns>The matched features</returns>
public MatchedImageFeature[] MatchFeature(ImageFeature[] observedFeatures, int k, int emax)
{
   if (observedFeatures.Length == 0)
      return new MatchedImageFeature[0];

   // Gather the raw descriptors into a jagged array for the k-d tree query.
   float[][] descriptors = new float[observedFeatures.Length][];
   for (int i = 0; i < observedFeatures.Length; i++)
      descriptors[i] = observedFeatures[i].Descriptor;

   using (Matrix<int> result1 = new Matrix<int>(descriptors.Length, k))
   using (Matrix<float> dist1 = new Matrix<float>(descriptors.Length, k))
   // The query matrix built from the descriptors was previously leaked; dispose it too.
   using (Matrix<float> queryMatrix = CvToolbox.GetMatrixFromDescriptors(descriptors))
   {
      _modelIndex.KnnSearch(queryMatrix, result1, dist1, k, emax);

      int[,] indexes = result1.Data;
      float[,] distances = dist1.Data;
      MatchedImageFeature[] res = new MatchedImageFeature[observedFeatures.Length];
      // Scratch list reused across iterations, presized to the neighbor count.
      List<SimilarFeature> matchedFeatures = new List<SimilarFeature>(k);
      for (int i = 0; i < res.Length; i++)
      {
         matchedFeatures.Clear();
         for (int j = 0; j < k; j++)
         {
            int index = indexes[i, j];
            // A negative index means fewer than k neighbors were found for this row.
            if (index >= 0)
            {
               matchedFeatures.Add(new SimilarFeature(distances[i, j], _modelFeatures[index]));
            }
         }
         res[i].ObservedFeature = observedFeatures[i];
         res[i].SimilarFeatures = matchedFeatures.ToArray();
      }
      return res;
   }
}
/// <summary>
/// Eliminate the matched features whose scale and rotation do not agree with the majority's scale and rotation.
/// </summary>
/// <param name="rotationBins">The number of bins for rotation; a good value might be 20 (each bin then covers 18 degrees)</param>
/// <param name="scaleIncrement">Determines the difference in scale between neighboring bins; a good value might be 1.5 (matched features in bin i+1 are scaled 1.5 times larger than those in bin i)</param>
/// <param name="matchedFeatures">The matched features that participate in the voting. For each matched feature, only the zero-indexed model feature is considered.</param>
public static MatchedImageFeature[] VoteForSizeAndOrientation(MatchedImageFeature[] matchedFeatures, double scaleIncrement, int rotationBins)
{
   int elementsCount = matchedFeatures.Length;
   if (elementsCount < 1)
      return matchedFeatures;

   float[] scales = new float[elementsCount];
   float[] rotations = new float[elementsCount];
   // Back-projection result: a non-zero flag marks a feature to keep.
   float[] flags = new float[elementsCount];
   float minScale = float.MaxValue;
   float maxScale = float.MinValue;

   for (int i = 0; i < matchedFeatures.Length; i++)
   {
      // Log10 of the observed/model size ratio, so scale bins are multiplicative steps.
      float scale = (float)matchedFeatures[i].ObservedFeature.KeyPoint.Size / (float)matchedFeatures[i].SimilarFeatures[0].Feature.KeyPoint.Size;
      scale = (float)Math.Log10(scale);
      scales[i] = scale;
      if (scale < minScale)
         minScale = scale;
      if (scale > maxScale)
         maxScale = scale;

      // Rotation difference normalized into [0, 360).
      float rotation = matchedFeatures[i].ObservedFeature.KeyPoint.Angle - matchedFeatures[i].SimilarFeatures[0].Feature.KeyPoint.Angle;
      rotations[i] = rotation < 0.0 ? rotation + 360 : rotation;
   }

   // Number of scale bins needed to cover [minScale, maxScale] in steps of log10(scaleIncrement).
   int scaleBinSize = (int)Math.Max(((maxScale - minScale) / Math.Log10(scaleIncrement)), 1);
   int count;

   if (scaleBinSize == 1)
   {
      // Only one scale bin: vote on rotation alone with a 1D histogram.
      using (DenseHistogram h = new DenseHistogram(new int[] { rotationBins }, new RangeF[] { new RangeF(0, 360) }))
      {
         // Pin the managed arrays so native OpenCV can read/write them in place.
         GCHandle rotationHandle = GCHandle.Alloc(rotations, GCHandleType.Pinned);
         GCHandle flagsHandle = GCHandle.Alloc(flags, GCHandleType.Pinned);
         try
         {
            using (Matrix<float> flagsMat = new Matrix<float>(1, elementsCount, flagsHandle.AddrOfPinnedObject()))
            using (Matrix<float> rotationsMat = new Matrix<float>(1, elementsCount, rotationHandle.AddrOfPinnedObject()))
            {
               h.Calculate(new Matrix<float>[] { rotationsMat }, true, null);
               float minVal, maxVal;
               int[] minLoc, maxLoc;
               h.MinMax(out minVal, out maxVal, out minLoc, out maxLoc);
               // Zero every bin with less than half of the peak vote, then back-project:
               // a feature keeps a non-zero flag only if it falls into a surviving bin.
               h.Threshold(maxVal * 0.5);
               CvInvoke.cvCalcBackProject(new IntPtr[] { rotationsMat.Ptr }, flagsMat.Ptr, h.Ptr);
               count = CvInvoke.cvCountNonZero(flagsMat);
            }
         }
         finally
         {
            // Free the pinned handles even if a histogram call throws
            // (the original leaked them on exception).
            rotationHandle.Free();
            flagsHandle.Free();
         }
      }
   }
   else
   {
      // Vote jointly on scale and rotation with a 2D histogram.
      using (DenseHistogram h = new DenseHistogram(new int[] { scaleBinSize, rotationBins }, new RangeF[] { new RangeF(minScale, maxScale), new RangeF(0, 360) }))
      {
         GCHandle scaleHandle = GCHandle.Alloc(scales, GCHandleType.Pinned);
         GCHandle rotationHandle = GCHandle.Alloc(rotations, GCHandleType.Pinned);
         GCHandle flagsHandle = GCHandle.Alloc(flags, GCHandleType.Pinned);
         try
         {
            using (Matrix<float> flagsMat = new Matrix<float>(1, elementsCount, flagsHandle.AddrOfPinnedObject()))
            using (Matrix<float> scalesMat = new Matrix<float>(1, elementsCount, scaleHandle.AddrOfPinnedObject()))
            using (Matrix<float> rotationsMat = new Matrix<float>(1, elementsCount, rotationHandle.AddrOfPinnedObject()))
            {
               h.Calculate(new Matrix<float>[] { scalesMat, rotationsMat }, true, null);
               float minVal, maxVal;
               int[] minLoc, maxLoc;
               h.MinMax(out minVal, out maxVal, out minLoc, out maxLoc);
               h.Threshold(maxVal * 0.5);
               CvInvoke.cvCalcBackProject(new IntPtr[] { scalesMat.Ptr, rotationsMat.Ptr }, flagsMat.Ptr, h.Ptr);
               count = CvInvoke.cvCountNonZero(flagsMat);
            }
         }
         finally
         {
            scaleHandle.Free();
            rotationHandle.Free();
            flagsHandle.Free();
         }
      }
   }

   // Compact the surviving features into the result array
   // (this tail was duplicated in both branches of the original).
   MatchedImageFeature[] matchedGoodFeatures = new MatchedImageFeature[count];
   int index = 0;
   for (int i = 0; i < matchedFeatures.Length; i++)
   {
      if (flags[i] != 0)
         matchedGoodFeatures[index++] = matchedFeatures[i];
   }
   return matchedGoodFeatures;
}
/// <summary>
/// Eliminate the matched features whose scale and rotation do not agree with the majority's scale and rotation.
/// </summary>
/// <param name="rotationBins">The number of bins for rotation; a good value might be 20 (each bin then covers 18 degrees)</param>
/// <param name="scaleIncrement">Determines the difference in scale between neighboring bins; a good value might be 1.5 (matched features in bin i+1 are scaled 1.5 times larger than those in bin i)</param>
/// <param name="matchedFeatures">The matched features that participate in the voting. For each matched feature, only the zero-indexed model feature is considered.</param>
public static MatchedImageFeature[] VoteForSizeAndOrientation(MatchedImageFeature[] matchedFeatures, double scaleIncrement, int rotationBins)
{
   int elementsCount = matchedFeatures.Length;
   if (elementsCount < 1)
      return matchedFeatures;

   float[] scales = new float[elementsCount];
   float[] rotations = new float[elementsCount];
   float[] flags = new float[elementsCount]; // back-projection result: non-zero marks a kept feature
   float minScale = float.MaxValue;
   float maxScale = float.MinValue;

   for (int i = 0; i < matchedFeatures.Length; i++)
   {
      // Log10 of the observed/model size ratio, so scale bins are multiplicative steps.
      float scale = (float)matchedFeatures[i].ObservedFeature.KeyPoint.Size / (float)matchedFeatures[i].SimilarFeatures[0].Feature.KeyPoint.Size;
      scale = (float)Math.Log10(scale);
      scales[i] = scale;
      if (scale < minScale)
         minScale = scale;
      if (scale > maxScale)
         maxScale = scale;

      // Rotation difference normalized into [0, 360).
      float rotation = matchedFeatures[i].ObservedFeature.KeyPoint.Angle - matchedFeatures[i].SimilarFeatures[0].Feature.KeyPoint.Angle;
      rotations[i] = rotation < 0.0 ?
         rotation + 360 : rotation;
   }

   // Number of scale bins needed to cover [minScale, maxScale] in steps of log10(scaleIncrement).
   int scaleBinSize = (int)Math.Max(((maxScale - minScale) / Math.Log10(scaleIncrement)), 1);

   if (scaleBinSize == 1)
   {
      //handle the case where there is only one scale bin: vote on rotation alone
      using (DenseHistogram h = new DenseHistogram(new int[] { rotationBins }, new RangeF[] { new RangeF(0, 360) }))
      {
         int count;
         // Pin the managed arrays so native OpenCV can read/write them in place.
         GCHandle rotationHandle = GCHandle.Alloc(rotations, GCHandleType.Pinned);
         GCHandle flagsHandle = GCHandle.Alloc(flags, GCHandleType.Pinned);
         // NOTE(review): the pinned handles are not freed if a call below throws — consider try/finally.
         using (Matrix<float> flagsMat = new Matrix<float>(1, elementsCount, flagsHandle.AddrOfPinnedObject()))
         using (Matrix<float> rotationsMat = new Matrix<float>(1, elementsCount, rotationHandle.AddrOfPinnedObject()))
         {
            h.Calculate(new Matrix<float>[] { rotationsMat }, true, null);
            float minVal, maxVal;
            int[] minLoc, maxLoc;
            h.MinMax(out minVal, out maxVal, out minLoc, out maxLoc);
            // Zero every bin with less than half of the peak vote, then back-project:
            // a feature keeps a non-zero flag only if it falls into a surviving bin.
            h.Threshold(maxVal * 0.5);
            CvInvoke.cvCalcBackProject(new IntPtr[] { rotationsMat.Ptr }, flagsMat.Ptr, h.Ptr);
            count = CvInvoke.cvCountNonZero(flagsMat);
         }
         rotationHandle.Free();
         flagsHandle.Free();

         // Compact the surviving features into the result array.
         MatchedImageFeature[] matchedGoodFeatures = new MatchedImageFeature[count];
         int index = 0;
         for (int i = 0; i < matchedFeatures.Length; i++)
            if (flags[i] != 0)
               matchedGoodFeatures[index++] = matchedFeatures[i];
         return matchedGoodFeatures;
      }
   }
   else
   {
      // Vote jointly on scale and rotation with a 2D histogram.
      using (DenseHistogram h = new DenseHistogram(new int[] { scaleBinSize, rotationBins }, new RangeF[] { new RangeF(minScale, maxScale), new RangeF(0, 360) }))
      {
         int count;
         GCHandle scaleHandle = GCHandle.Alloc(scales, GCHandleType.Pinned);
         GCHandle rotationHandle = GCHandle.Alloc(rotations, GCHandleType.Pinned);
         GCHandle flagsHandle = GCHandle.Alloc(flags, GCHandleType.Pinned);
         using (Matrix<float> flagsMat = new Matrix<float>(1, elementsCount, flagsHandle.AddrOfPinnedObject()))
         using (Matrix<float> scalesMat = new Matrix<float>(1, elementsCount, scaleHandle.AddrOfPinnedObject()))
         using (Matrix<float> rotationsMat = new Matrix<float>(1, elementsCount, rotationHandle.AddrOfPinnedObject()))
         {
            h.Calculate(new Matrix<float>[] { scalesMat, rotationsMat }, true, null);
            float minVal, maxVal;
            int[] minLoc, maxLoc;
            h.MinMax(out minVal, out maxVal, out minLoc, out maxLoc);
            h.Threshold(maxVal * 0.5);
            CvInvoke.cvCalcBackProject(new IntPtr[] { scalesMat.Ptr, rotationsMat.Ptr }, flagsMat.Ptr, h.Ptr);
            count = CvInvoke.cvCountNonZero(flagsMat);
         }
         scaleHandle.Free();
         rotationHandle.Free();
         flagsHandle.Free();

         // Compact the surviving features into the result array.
         MatchedImageFeature[] matchedGoodFeatures = new MatchedImageFeature[count];
         int index = 0;
         for (int i = 0; i < matchedFeatures.Length; i++)
            if (flags[i] != 0)
               matchedGoodFeatures[index++] = matchedFeatures[i];
         return matchedGoodFeatures;
      }
   }
}
/* private static int CompareSimilarFeature(SimilarFeature f1, SimilarFeature f2)
   {
      if (f1.Distance < f2.Distance)
         return -1;
      if (f1.Distance == f2.Distance)
         return 0;
      else
         return 1;
   }*/

/// <summary>
/// Match the image features from the observed image to the features from the model image
/// using the model's k-d tree index.
/// </summary>
/// <param name="observedFeatures">The image features from the observed image</param>
/// <param name="k">The number of neighbors to find</param>
/// <param name="emax">For k-d tree only: the maximum number of leaves to visit.</param>
/// <returns>The matched features</returns>
public MatchedImageFeature[] MatchFeature(ImageFeature[] observedFeatures, int k, int emax)
{
   if (observedFeatures.Length == 0)
      return new MatchedImageFeature[0];

   // Gather the raw descriptors into a jagged array for the k-d tree query.
   float[][] descriptors = new float[observedFeatures.Length][];
   for (int i = 0; i < observedFeatures.Length; i++)
      descriptors[i] = observedFeatures[i].Descriptor;

   using (Matrix<int> result1 = new Matrix<int>(descriptors.Length, k))
   using (Matrix<float> dist1 = new Matrix<float>(descriptors.Length, k))
   {
      // NOTE(review): the matrix returned by GetMatrixFromDescriptors is never disposed here.
      _modelIndex.KnnSearch(CvToolbox.GetMatrixFromDescriptors(descriptors), result1, dist1, k, emax);

      int[,] indexes = result1.Data;
      float[,] distances = dist1.Data;
      MatchedImageFeature[] res = new MatchedImageFeature[observedFeatures.Length];
      // Scratch list reused across iterations.
      List<SimilarFeature> matchedFeatures = new List<SimilarFeature>();
      for (int i = 0; i < res.Length; i++)
      {
         matchedFeatures.Clear();
         for (int j = 0; j < k; j++)
         {
            int index = indexes[i, j];
            // A negative index means fewer than k neighbors were found for this row.
            if (index >= 0)
            {
               matchedFeatures.Add(new SimilarFeature(distances[i, j], _modelFeatures[index]));
            }
         }
         res[i].ObservedFeature = observedFeatures[i];
         res[i].SimilarFeatures = matchedFeatures.ToArray();
      }
      return res;
   }
}
/// <summary>
/// Filter the matched features, such that if a match is not unique, it is rejected.
/// </summary>
/// <param name="matchedFeatures">The matched image features, each of them has the model features sorted by distance (e.g. SortMatchedFeaturesByDistance)</param>
/// <param name="uniquenessThreshold">The distance-difference ratio below which a match is considered unique; a good number will be 0.8</param>
/// <returns>The filtered matched image features</returns>
public static MatchedImageFeature[] VoteForUniqueness(MatchedImageFeature[] matchedFeatures, double uniquenessThreshold)
{
   List<MatchedImageFeature> uniqueMatches = new List<MatchedImageFeature>();
   foreach (MatchedImageFeature candidate in matchedFeatures)
   {
      // Keep the match when it is the only one, or when the best match is
      // sufficiently closer than the second best (ratio test).
      bool onlyMatch = candidate.SimilarFeatures.Length == 1;
      if (onlyMatch
         || candidate.SimilarFeatures[0].Distance / candidate.SimilarFeatures[1].Distance <= uniquenessThreshold)
      {
         uniqueMatches.Add(candidate);
      }
   }
   return uniqueMatches.ToArray();
}
/// <summary>
/// Match the image features from the observed image to the features from the model image
/// using a brute-force matcher with L2 (float) distance.
/// </summary>
/// <param name="observedFeatures">The image features from the observed image</param>
/// <param name="k">The number of neighbors to find</param>
/// <returns>The matched features</returns>
public MatchedImageFeature[] MatchFeature(ImageFeature[] observedFeatures, int k)
{
   // Convert the managed features into the native key point / descriptor containers.
   VectorOfKeyPoint obsKpts;
   Matrix<float> obsDscpts;
   ConvertFromImageFeature(observedFeatures, out obsKpts, out obsDscpts);

   using (BruteForceMatcher matcher = new BruteForceMatcher(BruteForceMatcher.DistanceType.L2F32))
   using (Matrix<int> indices = new Matrix<int>(obsKpts.Size, k))
   using (Matrix<float> dists = new Matrix<float>(indices.Size))
   {
      matcher.Add(_modelDescriptors);
      matcher.KnnMatch(obsDscpts, indices, dists, k, null);

      // Copy the k nearest model features (and their distances) for each observed feature.
      MatchedImageFeature[] result = new MatchedImageFeature[observedFeatures.Length];
      for (int i = 0; i < observedFeatures.Length; i++)
      {
         result[i].SimilarFeatures = new SimilarFeature[k];
         for (int j = 0; j < k; j++)
         {
            result[i].SimilarFeatures[j].Distance = dists.Data[i, j];
            result[i].SimilarFeatures[j].Feature = _modelFeatures[indices.Data[i, j]];
         }
         result[i].ObservedFeature = observedFeatures[i];
      }
      // NOTE(review): these are only disposed on the success path; an exception above leaks them.
      obsKpts.Dispose();
      obsDscpts.Dispose();
      return result;
   }
}
/// <summary>
/// Convert the raw key points and descriptors to an array of managed structures.
/// </summary>
/// <param name="modelKeyPointVec">The model key point vector</param>
/// <param name="modelDescriptorMat">The model descriptor matrix</param>
/// <param name="observedKeyPointVec">The observed key point vector</param>
/// <param name="observedDescriptorMat">The observed descriptor matrix</param>
/// <param name="indices">The indices matrix (k nearest model rows per observed row; -1 marks a missing neighbor)</param>
/// <param name="dists">The distances matrix</param>
/// <param name="mask">The mask; when non-null, only rows with a non-zero mask value are converted</param>
/// <returns>The managed MatchedImageFeature array</returns>
public static MatchedImageFeature[] ConvertToMatchedImageFeature(
   VectorOfKeyPoint modelKeyPointVec, Matrix<TDescriptor> modelDescriptorMat,
   VectorOfKeyPoint observedKeyPointVec, Matrix<TDescriptor> observedDescriptorMat,
   Matrix<int> indices, Matrix<float> dists, Matrix<Byte> mask)
{
   MKeyPoint[] modelKeyPoints = modelKeyPointVec.ToArray();
   MKeyPoint[] observedKeyPoints = observedKeyPointVec.ToArray();

   int resultLength = (mask == null) ? observedKeyPoints.Length : CvInvoke.cvCountNonZero(mask);
   MatchedImageFeature[] result = new MatchedImageFeature[resultLength];

   // Read the raw CvMat headers so descriptor rows can be copied directly from native memory.
   MCvMat modelMat = (MCvMat)Marshal.PtrToStructure(modelDescriptorMat.Ptr, typeof(MCvMat));
   long modelPtr = modelMat.data.ToInt64();
   int modelStep = modelMat.step;

   MCvMat observedMat = (MCvMat)Marshal.PtrToStructure(observedDescriptorMat.Ptr, typeof(MCvMat));
   long observedPtr = observedMat.data.ToInt64();
   int observedStep = observedMat.step;

   int descriptorLength = modelMat.cols;
   int descriptorSizeInByte = descriptorLength * Marshal.SizeOf(typeof(TDescriptor));
   int k = dists.Cols;

   // Pinned scratch buffer used as the destination of each native memcpy.
   TDescriptor[] tmp = new TDescriptor[descriptorLength];
   GCHandle handle = GCHandle.Alloc(tmp, GCHandleType.Pinned);
   try
   {
      IntPtr address = handle.AddrOfPinnedObject();
      int resultIdx = 0;
      for (int i = 0; i < observedKeyPoints.Length; i++)
      {
         // Skip rows that the mask filters out.
         if (mask != null && mask.Data[i, 0] == 0)
            continue;

         SimilarFeature[] features = new SimilarFeature[k];
         for (int j = 0; j < k; j++)
         {
            features[j].Distance = dists.Data[i, j];
            ImageFeature<TDescriptor> imgFeature = new ImageFeature<TDescriptor>();
            int idx = indices.Data[i, j];
            if (idx == -1)
            {
               // Fewer than k neighbors were found; trim the array to the valid prefix.
               Array.Resize(ref features, j);
               break;
            }
            imgFeature.KeyPoint = modelKeyPoints[idx];
            imgFeature.Descriptor = new TDescriptor[descriptorLength];
            // Copy the model descriptor row from native memory via the pinned scratch buffer.
            Emgu.Util.Toolbox.memcpy(address, new IntPtr(modelPtr + modelStep * idx), descriptorSizeInByte);
            tmp.CopyTo(imgFeature.Descriptor, 0);
            features[j].Feature = imgFeature;
         }
         result[resultIdx].SimilarFeatures = features;

         ImageFeature<TDescriptor> observedFeature = new ImageFeature<TDescriptor>();
         observedFeature.KeyPoint = observedKeyPoints[i];
         observedFeature.Descriptor = new TDescriptor[descriptorLength];
         Emgu.Util.Toolbox.memcpy(address, new IntPtr(observedPtr + observedStep * i), descriptorSizeInByte);
         tmp.CopyTo(observedFeature.Descriptor, 0);
         result[resultIdx].ObservedFeature = observedFeature;
         resultIdx++;
      }
      return result;
   }
   finally
   {
      // Unpin the scratch buffer even if marshaling throws
      // (the original leaked the handle on exception).
      handle.Free();
   }
}