/// <summary>
/// Verifies CUDA SURF keypoint detection on a random test image: uploads the
/// image to the GPU, checks the upload/download round trip is lossless, then
/// detects SURF keypoints on the GPU and downloads them to the host.
/// </summary>
public void TestCudaSURFKeypointDetection()
{
    if (CudaInvoke.HasCuda)
    {
        // FIX: all of these wrap unmanaged OpenCV resources and were previously
        // leaked; dispose them deterministically with using blocks.
        using (Image<Gray, byte> image = new Image<Gray, byte>(200, 100))
        {
            image.SetRandUniform(new MCvScalar(), new MCvScalar(255));
            using (GpuMat gpuMat = new GpuMat(image))
            using (Mat downloaded = gpuMat.ToMat())
            {
                // The GPU round trip must preserve the pixel data exactly.
                EmguAssert.IsTrue(downloaded.Equals(image.Mat));

                using (CudaSURF cudaSurf = new CudaSURF(100.0f, 2, 4, false, 0.01f, false))
                using (GpuMat cudaKpts = cudaSurf.DetectKeyPointsRaw(gpuMat, null))
                using (VectorOfKeyPoint kpts = new VectorOfKeyPoint())
                {
                    cudaSurf.DownloadKeypoints(cudaKpts, kpts);
                }
            }
        }
    }
}
/// <summary>
/// Finds SURF matches between a model and an observed image on the GPU and
/// computes a homography when enough good matches survive filtering.
/// </summary>
/// <param name="modelImage">The model (object) image.</param>
/// <param name="observedImage">The observed (scene) image.</param>
/// <param name="modelKeyPoints">Receives the key points detected in the model image.</param>
/// <param name="observedKeyPoints">Receives the key points detected in the observed image.</param>
/// <param name="matches">Receives the k-nearest-neighbour descriptor matches.</param>
/// <param name="mask">Receives the per-match filter mask (255 = kept).</param>
/// <param name="homography">
/// Fallback value: this is a by-value parameter, so it is returned unchanged
/// when fewer than 4 good matches remain (callers typically pass null).
/// </param>
/// <param name="k">Number of nearest neighbours per descriptor.</param>
/// <param name="uniquenessThreshold">Best/second-best distance ratio threshold for VoteForUniqueness.</param>
/// <param name="hessianThresh">SURF Hessian threshold.</param>
/// <returns>The computed homography, or the supplied <paramref name="homography"/> value.</returns>
private static Mat FindMatchWithCuda(Mat modelImage, Mat observedImage, VectorOfKeyPoint modelKeyPoints, VectorOfKeyPoint observedKeyPoints, VectorOfVectorOfDMatch matches, out Mat mask, Mat homography, int k, double uniquenessThreshold, double hessianThresh)
{
    // FIX: CudaSURF wraps an unmanaged detector and was previously leaked;
    // dispose it together with the other GPU resources.
    using (CudaSURF surfCuda = new CudaSURF((float)hessianThresh))
    using (GpuMat gpuModelImage = new GpuMat(modelImage))
    // Extract features from the object (model) image.
    using (GpuMat gpuModelKeyPoints = surfCuda.DetectKeyPointsRaw(gpuModelImage, null))
    using (GpuMat gpuModelDescriptors = surfCuda.ComputeDescriptorsRaw(gpuModelImage, null, gpuModelKeyPoints))
    using (CudaBFMatcher matcher = new CudaBFMatcher(DistanceType.L2))
    {
        surfCuda.DownloadKeypoints(gpuModelKeyPoints, modelKeyPoints);

        // Extract features from the observed (scene) image.
        using (GpuMat gpuObservedImage = new GpuMat(observedImage))
        using (GpuMat gpuObservedKeyPoints = surfCuda.DetectKeyPointsRaw(gpuObservedImage, null))
        using (GpuMat gpuObservedDescriptors = surfCuda.ComputeDescriptorsRaw(gpuObservedImage, null, gpuObservedKeyPoints))
        {
            matcher.KnnMatch(gpuObservedDescriptors, gpuModelDescriptors, matches, k);
            surfCuda.DownloadKeypoints(gpuObservedKeyPoints, observedKeyPoints);

            mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
            mask.SetTo(new MCvScalar(255));
            // Reject matches whose best/second-best distance ratio is not distinctive.
            Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

            int nonZeroCount = CvInvoke.CountNonZero(mask);
            if (nonZeroCount >= 4)
            {
                // Reject matches whose scale/rotation disagree with the majority.
                nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, matches, mask, 1.5, 20);
                if (nonZeroCount >= 4)
                {
                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, matches, mask, 2);
                }
            }
        }
    }
    return (homography);
}
/// <summary>
/// Detects SURF key points on the GPU and displays a copy of the image with a
/// "." drawn at each key point location in an image viewer window.
/// </summary>
/// <param name="image">The BGR input image.</param>
private static void DetectKeyPointsCuda(Image<Bgr, byte> image)
{
    Console.WriteLine(CudaInvoke.HasCuda);
    using (GpuMat<byte> gpuImage = new GpuMat<byte>(image.Mat))
    // FIX: the CudaSURF detector wraps an unmanaged resource and was previously leaked.
    using (CudaSURF surfDetector = new CudaSURF(400, 4, 2, false))
    {
        MKeyPoint[] features = surfDetector.DetectKeyPoints(gpuImage, null);
        using (Image<Bgr, byte> imageWithFeatures = new Image<Bgr, byte>(image.Bitmap))
        {
            foreach (MKeyPoint mKeyPoint in features)
            {
                // Mark each detected key point with a small dot glyph.
                imageWithFeatures.Draw(".", new Point((int)mKeyPoint.Point.X, (int)mKeyPoint.Point.Y), FontFace.HersheyComplex, 1, new Bgr(Color.Gray));
            }
            ImageViewer.Show(imageWithFeatures);
        }
    }
}
/// <summary>
/// Finds SURF feature matches between a model image and an observed image,
/// using the CUDA pipeline when a CUDA device is present and the CPU/OpenCL
/// pipeline otherwise, and computes a homography from the good matches.
/// </summary>
/// <param name="modelImage">The model (object) image.</param>
/// <param name="observedImage">The observed (scene) image.</param>
/// <param name="matchTime">Milliseconds spent on observed-image extraction and matching.</param>
/// <param name="modelKeyPoints">Receives the key points detected in the model image.</param>
/// <param name="observedKeyPoints">Receives the key points detected in the observed image.</param>
/// <param name="matches">Receives the k-nearest-neighbour matches.</param>
/// <param name="mask">Receives the per-match filter mask (255 = kept).</param>
/// <param name="homography">Receives the homography, or null when fewer than 4 good matches remain.</param>
public static void FindMatch(Mat modelImage, Mat observedImage, out long matchTime, out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints, VectorOfVectorOfDMatch matches, out Mat mask, out Mat homography)
{
    int k = 2;
    double uniquenessThreshold = 0.8;
    double hessianThresh = 300;
    Stopwatch watch;
    homography = null;
    modelKeyPoints = new VectorOfKeyPoint();
    observedKeyPoints = new VectorOfKeyPoint();

    if (CudaInvoke.HasCuda)
    {
        // FIX: CudaSURF wraps an unmanaged detector and was previously leaked.
        using (CudaSURF surfCuda = new CudaSURF((float)hessianThresh))
        using (GpuMat gpuModelImage = new GpuMat(modelImage))
        // Extract features from the object (model) image.
        using (GpuMat gpuModelKeyPoints = surfCuda.DetectKeyPointsRaw(gpuModelImage, null))
        using (GpuMat gpuModelDescriptors = surfCuda.ComputeDescriptorsRaw(gpuModelImage, null, gpuModelKeyPoints))
        using (CudaBFMatcher matcher = new CudaBFMatcher(DistanceType.L2))
        {
            surfCuda.DownloadKeypoints(gpuModelKeyPoints, modelKeyPoints);
            watch = Stopwatch.StartNew();

            // Extract features from the observed (scene) image.
            using (GpuMat gpuObservedImage = new GpuMat(observedImage))
            using (GpuMat gpuObservedKeyPoints = surfCuda.DetectKeyPointsRaw(gpuObservedImage, null))
            using (GpuMat gpuObservedDescriptors = surfCuda.ComputeDescriptorsRaw(gpuObservedImage, null, gpuObservedKeyPoints))
            {
                matcher.KnnMatch(gpuObservedDescriptors, gpuModelDescriptors, matches, k);
                surfCuda.DownloadKeypoints(gpuObservedKeyPoints, observedKeyPoints);

                mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                mask.SetTo(new MCvScalar(255));
                // Reject matches whose best/second-best distance ratio is not distinctive.
                Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

                int nonZeroCount = CvInvoke.CountNonZero(mask);
                if (nonZeroCount >= 4)
                {
                    nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, matches, mask, 1.5, 20);
                    if (nonZeroCount >= 4)
                    {
                        homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, matches, mask, 2);
                    }
                }
            }
            watch.Stop();
        }
    }
    else
    {
        using (UMat uModelImage = modelImage.GetUMat(AccessType.Read))
        using (UMat uObservedImage = observedImage.GetUMat(AccessType.Read))
        // FIX: SURF, the descriptor UMats and the matcher were previously leaked.
        using (SURF surfCPU = new SURF(hessianThresh))
        using (UMat modelDescriptors = new UMat())
        using (UMat observedDescriptors = new UMat())
        using (BFMatcher matcher = new BFMatcher(DistanceType.L2))
        {
            // Extract features from the object (model) image.
            surfCPU.DetectAndCompute(uModelImage, null, modelKeyPoints, modelDescriptors, false);
            watch = Stopwatch.StartNew();

            // Extract features from the observed (scene) image.
            surfCPU.DetectAndCompute(uObservedImage, null, observedKeyPoints, observedDescriptors, false);
            matcher.Add(modelDescriptors);
            matcher.KnnMatch(observedDescriptors, matches, k, null);

            mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
            mask.SetTo(new MCvScalar(255));
            Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

            // FIX: the CPU branch previously stopped after the uniqueness vote and
            // never computed the homography, so callers always received null on
            // machines without CUDA. Mirror the CUDA branch.
            int nonZeroCount = CvInvoke.CountNonZero(mask);
            if (nonZeroCount >= 4)
            {
                nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, matches, mask, 1.5, 20);
                if (nonZeroCount >= 4)
                {
                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, matches, mask, 2);
                }
            }
            watch.Stop();
        }
    }
    matchTime = watch.ElapsedMilliseconds;
}
/// <summary>
/// Finds matching points between two face images using CUDA SURF, computes a
/// homography from the filtered matches, and reports the raw match count as a score.
/// Only runs when <c>Controller.Instance.Cuda</c> is true; the CPU fallback is
/// commented out below.
/// </summary>
/// <param name="modelImage">The model image.</param>
/// <param name="observedImage">The observed image.</param>
/// <param name="matchTime">The match time. NOTE(review): always set to 0; the
/// elapsed time measured by <c>watch</c> is never reported — confirm intended.</param>
/// <param name="modelKeyPoints">Receives the model key points.</param>
/// <param name="observedKeyPoints">Receives the observed key points.</param>
/// <param name="matches">Receives the k-nearest-neighbour matches.</param>
/// <param name="mask">Receives the per-match filter mask; null when CUDA is unavailable.</param>
/// <param name="homography">Receives the homography, or null when fewer than 4 good matches remain.</param>
/// <param name="score">Receives the number of match rows (equals <c>matches.Size</c>).</param>
private void FindMatch(
    Mat modelImage,
    Mat observedImage,
    out long matchTime,
    out VectorOfKeyPoint modelKeyPoints,
    out VectorOfKeyPoint observedKeyPoints,
    VectorOfVectorOfDMatch matches,
    out Mat mask,
    out Mat homography,
    out long score)
{
    int k = 2;
    // NOTE(review): the other FindMatch overloads in this file use 0.8. Since
    // VoteForUniqueness compares the best/second-best distance ratio, a
    // threshold of 5 appears to let every match pass — confirm intended.
    double uniquenessThreshold = 5;
    Stopwatch watch;
    homography = null;
    mask = null;
    score = 0;
    modelKeyPoints = new VectorOfKeyPoint();
    observedKeyPoints = new VectorOfKeyPoint();
    if (Controller.Instance.Cuda)
    {
        // NOTE(review): surfGPU wraps an unmanaged detector and is never disposed.
        CudaSURF surfGPU = new CudaSURF(700f, 4, 2, false);
        using (CudaImage<Gray, byte> gpuModelImage = new CudaImage<Gray, byte>(modelImage))
        // Extract features from the object (model) image.
        using (GpuMat gpuModelKeyPoints = surfGPU.DetectKeyPointsRaw(gpuModelImage, null))
        using (GpuMat gpuModelDescriptors = surfGPU.ComputeDescriptorsRaw(gpuModelImage, null, gpuModelKeyPoints))
        using (CudaBFMatcher matcher = new CudaBFMatcher(DistanceType.L2))
        {
            surfGPU.DownloadKeypoints(gpuModelKeyPoints, modelKeyPoints);
            watch = Stopwatch.StartNew();
            // Extract features from the observed (scene) image.
            using (CudaImage<Gray, Byte> gpuObservedImage = new CudaImage<Gray, byte>(observedImage))
            using (GpuMat gpuObservedKeyPoints = surfGPU.DetectKeyPointsRaw(gpuObservedImage, null))
            using (GpuMat gpuObservedDescriptors = surfGPU.ComputeDescriptorsRaw(
                gpuObservedImage,
                null,
                gpuObservedKeyPoints))
            // NOTE(review): gpuMatchIndices and gpuMatchDist are only referenced by
            // the commented-out GPU voting code below; they are allocated but unused.
            using (GpuMat<int> gpuMatchIndices = new GpuMat<int>(
                gpuObservedDescriptors.Size.Height, k, 1, true))
            using (GpuMat<float> gpuMatchDist = new GpuMat<float>(
                gpuObservedDescriptors.Size.Height, k, 1, true))
            //using (GpuMat<Byte> gpuMask = new GpuMat<byte>(gpuMatchIndices.Size.Height, 1, 1))
            using (Emgu.CV.Cuda.Stream stream = new Emgu.CV.Cuda.Stream())
            {
                matcher.KnnMatch(gpuObservedDescriptors, gpuModelDescriptors, matches, k, null);
                //indices = new Matrix<int>(gpuMatchIndices.Size);
                //mask = new Matrix<byte>(gpuMask.Size);
                // Start with all matches accepted (255); the votes below clear rejected rows.
                mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                mask.SetTo(new MCvScalar(255));
                surfGPU.DownloadKeypoints(gpuObservedKeyPoints, observedKeyPoints);

                /*//gpu implementation of voteForUniquess
                 * using (GpuMat col0 = gpuMatchDist.Col(0))
                 * using (GpuMat col1 = gpuMatchDist.Col(1))
                 * {
                 *    CudaInvoke.Multiply(col1, new GpuMat(), col1, 1, DepthType.Default, stream);
                 *    CudaInvoke.Compare(col0, col1, mask, CmpType.LessEqual, stream);
                 * }*/

                Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

                // Wait for the stream to complete its tasks.
                // We could perform other CPU-intensive work here while waiting.
                stream.WaitForCompletion();
                //gpuMatchIndices.Download(indices);
                if (CudaInvoke.CountNonZero(mask) >= 4)
                {
                    // Reject matches whose scale/rotation disagree with the majority.
                    int nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(
                        modelKeyPoints,
                        observedKeyPoints,
                        matches,
                        mask,
                        1.5,
                        20);
                    if (nonZeroCount >= 4)
                    {
                        homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(
                            modelKeyPoints,
                            observedKeyPoints,
                            matches,
                            mask,
                            2);
                    }
                }

                watch.Stop();
            }

            // NOTE(review): this loop is equivalent to score = matches.Size; it
            // counts match rows regardless of the mask.
            for (int i = 0; i < matches.Size; i++)
            {
                score++;
            }
        }
    }

    //else
    //{
    //    SURF surfCPU = new SURF(500, 4, 2, false);
    //    //extract features from the object image
    //    modelKeyPoints = new VectorOfKeyPoint();
    //    Matrix<float> modelDescriptors = surfCPU.DetectAndCompute(modelImage, null, modelKeyPoints);

    //    watch = Stopwatch.StartNew();

    //    // extract features from the observed image
    //    observedKeyPoints = new VectorOfKeyPoint();
    //    Matrix<float> observedDescriptors = surfCPU.DetectAndCompute(observedImage, null, observedKeyPoints);
    //    BFMatcher matcher = new BFMatcher<float>(DistanceType.L2);
    //    matcher.Add(modelDescriptors);

    //    indices = new Matrix<int>(observedDescriptors.Rows, k);
    //    using (Matrix<float> dist = new Matrix<float>(observedDescriptors.Rows, k))
    //    {
    //        matcher.KnnMatch(observedDescriptors, indices, dist, k, null);
    //        mask = new Matrix<byte>(dist.Rows, 1);
    //        mask.SetValue(255);
    //        Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
    //    }

    //    int nonZeroCount = CvInvoke.cvCountNonZero(mask);
    //    if (nonZeroCount >= 4)
    //    {
    //        nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
    //        if (nonZeroCount >= 4)
    //            homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
    //    }
    //    watch.Stop();
    //}

    matchTime = 0;
}
/// <summary>
/// Detects and matches SURF features between a model and an observed image,
/// using CUDA when available, and computes a homography from the good matches.
/// </summary>
/// <param name="modelImage">The model (object) image.</param>
/// <param name="observedImage">The observed (scene) image.</param>
/// <param name="modelKeyPoints">Receives the key points detected in the model image.</param>
/// <param name="observedKeyPoints">Receives the key points detected in the observed image.</param>
/// <param name="matches">Receives the k-nearest-neighbour matches.</param>
/// <param name="mask">Receives the per-match filter mask (255 = kept).</param>
/// <param name="homography">Receives the homography, or null when fewer than 4 good matches remain.</param>
/// <param name="score">Receives the number of individual matches whose row survived the uniqueness vote.</param>
public static void FindMatch(Mat modelImage, Mat observedImage, out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints, VectorOfVectorOfDMatch matches, out Mat mask, out Mat homography, out int score)
{
    int k = 2;
    double uniquenessThreshold = 0.8;
    double hessianThresh = 300;
    homography = null;
    modelKeyPoints = new VectorOfKeyPoint();
    observedKeyPoints = new VectorOfKeyPoint();

    if (CudaInvoke.HasCuda)
    {
        // FIX: CudaSURF wraps an unmanaged detector and was previously leaked.
        using (CudaSURF surfCuda = new CudaSURF((float)hessianThresh))
        using (GpuMat gpuModelImage = new GpuMat(modelImage))
        // Extract features from the object (model) image.
        using (GpuMat gpuModelKeyPoints = surfCuda.DetectKeyPointsRaw(gpuModelImage, null))
        using (GpuMat gpuModelDescriptors = surfCuda.ComputeDescriptorsRaw(gpuModelImage, null, gpuModelKeyPoints))
        using (CudaBFMatcher matcher = new CudaBFMatcher(DistanceType.L2))
        {
            surfCuda.DownloadKeypoints(gpuModelKeyPoints, modelKeyPoints);

            // Extract features from the observed (scene) image.
            using (GpuMat gpuObservedImage = new GpuMat(observedImage))
            using (GpuMat gpuObservedKeyPoints = surfCuda.DetectKeyPointsRaw(gpuObservedImage, null))
            using (GpuMat gpuObservedDescriptors = surfCuda.ComputeDescriptorsRaw(gpuObservedImage, null, gpuObservedKeyPoints))
            {
                matcher.KnnMatch(gpuObservedDescriptors, gpuModelDescriptors, matches, k);
                surfCuda.DownloadKeypoints(gpuObservedKeyPoints, observedKeyPoints);

                mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                mask.SetTo(new MCvScalar(255));
                Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

                score = CountUnmaskedMatches(matches, mask);

                int nonZeroCount = CvInvoke.CountNonZero(mask);
                if (nonZeroCount >= 4)
                {
                    nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, matches, mask, 1.5, 20);
                    if (nonZeroCount >= 4)
                    {
                        homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, matches, mask, 2);
                    }
                }
            }
        }
    }
    else
    {
        using (UMat uModelImage = modelImage.GetUMat(AccessType.Read))
        using (UMat uObservedImage = observedImage.GetUMat(AccessType.Read))
        // FIX: SURF, the descriptor UMats and the matcher were previously leaked.
        using (SURF surfCPU = new SURF(hessianThresh))
        using (UMat modelDescriptors = new UMat())
        using (UMat observedDescriptors = new UMat())
        using (BFMatcher matcher = new BFMatcher(DistanceType.L2))
        {
            // Extract features from the object (model) image.
            surfCPU.DetectAndCompute(uModelImage, null, modelKeyPoints, modelDescriptors, false);

            // Extract features from the observed (scene) image.
            surfCPU.DetectAndCompute(uObservedImage, null, observedKeyPoints, observedDescriptors, false);
            matcher.Add(modelDescriptors);
            matcher.KnnMatch(observedDescriptors, matches, k, null);

            mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
            mask.SetTo(new MCvScalar(255));
            Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

            // FIX: the CPU branch previously counted every match (its mask check was
            // commented out), making the score inconsistent with the CUDA branch.
            // Both branches now use the same mask-aware counting.
            score = CountUnmaskedMatches(matches, mask);

            int nonZeroCount = CvInvoke.CountNonZero(mask);
            if (nonZeroCount >= 4)
            {
                nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, matches, mask, 1.5, 20);
                if (nonZeroCount >= 4)
                {
                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, matches, mask, 2);
                }
            }
        }
    }
}

/// <summary>
/// Counts the individual DMatch entries belonging to rows whose mask byte is non-zero.
/// </summary>
private static int CountUnmaskedMatches(VectorOfVectorOfDMatch matches, Mat mask)
{
    int total = 0;
    // FIX: hoisted out of the loop — GetData() copies the whole Mat and was
    // previously re-invoked on every iteration.
    Array maskData = mask.GetData();
    for (int i = 0; i < matches.Size; i++)
    {
        if ((byte)maskData.GetValue(i, 0) == 0)
        {
            continue;
        }
        using (VectorOfDMatch row = matches[i])
        {
            total += row.Size;
        }
    }
    return total;
}
/// <summary>
/// Finds SURF feature matches between a model image and an observed image,
/// using the CUDA pipeline when an NVIDIA GPU is available and the CPU/OpenCL
/// pipeline otherwise, and computes a homography from the good matches.
/// </summary>
/// <param name="modelImage">The model (object) image.</param>
/// <param name="observedImage">The observed (scene) image.</param>
/// <param name="matchTime">Milliseconds spent on observed-image extraction and matching.</param>
/// <param name="modelKeyPoints">Receives the key points detected in the model image.</param>
/// <param name="observedKeyPoints">Receives the key points detected in the observed image.</param>
/// <param name="matches">Receives the k-nearest-neighbour matches.</param>
/// <param name="mask">Receives the per-match filter mask (255 = kept).</param>
/// <param name="homography">Receives the homography, or null when fewer than 4 good matches remain.</param>
/// <param name="hessianThresh">SURF Hessian threshold; larger values yield fewer key points.</param>
public static void FindMatch(Mat modelImage, Mat observedImage, out long matchTime, out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints, VectorOfVectorOfDMatch matches, out Mat mask, out Mat homography, double hessianThresh)
{
    int k = 2;
    double uniquenessThreshold = 0.8;
    Stopwatch sw;
    homography = null;
    modelKeyPoints = new VectorOfKeyPoint();
    observedKeyPoints = new VectorOfKeyPoint();
#if !__IOS__
    // Use the GPU pipeline when an NVIDIA CUDA device is present.
    if (CudaInvoke.HasCuda)
    {
        // FIX: CudaSURF wraps an unmanaged detector and was previously leaked.
        using (CudaSURF surfCuda = new CudaSURF((float)hessianThresh))
        // On the GPU, GpuMat replaces cv::Mat.
        using (GpuMat gpuModelImage = new GpuMat(modelImage))
        // Extract key points from the model image.
        using (GpuMat gpuModelKeyPoints = surfCuda.DetectKeyPointsRaw(gpuModelImage, null))
        // Compute the model key point descriptors.
        using (GpuMat gpuModelDescriptors = surfCuda.ComputeDescriptorsRaw(gpuModelImage, null, gpuModelKeyPoints))
        // Create the brute-force matcher.
        using (CudaBFMatcher matcher = new CudaBFMatcher(DistanceType.L2))
        {
            surfCuda.DownloadKeypoints(gpuModelKeyPoints, modelKeyPoints);
            sw = Stopwatch.StartNew();

            using (GpuMat gpuObservedImage = new GpuMat(observedImage))
            using (GpuMat gpuObservedKeyPoints = surfCuda.DetectKeyPointsRaw(gpuObservedImage, null))
            using (GpuMat gpuObservedDescriptors = surfCuda.ComputeDescriptorsRaw(gpuObservedImage, null, gpuObservedKeyPoints))
            {
                // BUG FIX: previously matched the observed descriptors against
                // themselves (gpuObservedDescriptors twice), so every "match"
                // was a trivial self-match. Match observed against model.
                matcher.KnnMatch(gpuObservedDescriptors, gpuModelDescriptors, matches, k);
                // BUG FIX: previously downloaded gpuModelKeyPoints into
                // observedKeyPoints, so both key point vectors held the model's
                // points. Download the observed key points instead.
                surfCuda.DownloadKeypoints(gpuObservedKeyPoints, observedKeyPoints);

                mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                mask.SetTo(new MCvScalar(255));
                // Filter out matches that are not distinctive enough.
                Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

                // Count the surviving (non-zero) mask entries.
                int nonZeroCount = CvInvoke.CountNonZero(mask);
                if (nonZeroCount >= 4)
                {
                    // Reject matches whose scale/rotation disagree with the majority.
                    nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, matches, mask, 1.5, 20);
                    if (nonZeroCount >= 4)
                    {
                        homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, matches, mask, 2);
                    }
                }
            }
            sw.Stop();
        }
    }
    else
#endif
    {
        using (UMat uModelImage = modelImage.ToUMat(AccessType.Read))
        using (UMat uObservedImage = observedImage.ToUMat(AccessType.Read))
        // FIX: SURF, the descriptor UMats and the matcher were previously leaked.
        using (SURF surfCPU = new SURF(hessianThresh))
        using (UMat modelDescriptors = new UMat())
        using (UMat observedDescriptors = new UMat())
        using (BFMatcher matcher = new BFMatcher(DistanceType.L2))
        {
            // Extract descriptors from the model image.
            surfCPU.DetectAndCompute(uModelImage, null, modelKeyPoints, modelDescriptors, false);
            sw = Stopwatch.StartNew();

            // Extract descriptors from the observed image.
            surfCPU.DetectAndCompute(uObservedImage, null, observedKeyPoints, observedDescriptors, false);

            // Brute-force matching of observed descriptors against the model.
            matcher.Add(modelDescriptors);
            matcher.KnnMatch(observedDescriptors, matches, k, null);

            mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
            mask.SetTo(new MCvScalar(255));
            // Filter out matches that are not distinctive enough.
            Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

            int nonZeroCount = CvInvoke.CountNonZero(mask);
            if (nonZeroCount >= 4)
            {
                // Reject matches whose scale/rotation disagree with the majority.
                nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, matches, mask, 1.5, 20);
                if (nonZeroCount >= 4)
                {
                    // RANSAC homography; returns null if it cannot be recovered.
                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, matches, mask, 2);
                }
            }
            sw.Stop();
        }
    }
    matchTime = sw.ElapsedMilliseconds;
}