public void TestCudaOrbDetector()
{
    // Nothing to test on machines without a CUDA-capable device.
    if (!CudaInvoke.HasCuda)
    {
        return;
    }

    using (Image<Bgr, Byte> img = new Image<Bgr, byte>("box.png"))
    using (GpuMat cudaImage = new GpuMat(img))
    using (GpuMat grayCudaImage = new GpuMat())
    using (CudaORBDetector detector = new CudaORBDetector(500))
    using (VectorOfKeyPoint kpts = new VectorOfKeyPoint())
    using (GpuMat keyPointMat = new GpuMat())
    using (GpuMat descriptorMat = new GpuMat())
    {
        // ORB expects a single-channel image, so convert on the GPU first.
        CudaInvoke.CvtColor(cudaImage, grayCudaImage, ColorConversion.Bgr2Gray);

        // Detect into the GPU-side key-point matrix, then convert to a
        // host-side vector so the points can be drawn below.
        detector.DetectAsync(grayCudaImage, keyPointMat);
        detector.Convert(keyPointMat, kpts);

        // Visualize each detected key point on the original image.
        foreach (MKeyPoint keyPoint in kpts.ToArray())
        {
            img.Draw(new CircleF(keyPoint.Point, 3.0f), new Bgr(0, 255, 0), 1);
        }
    }
}
/// <summary>
/// Builds a CUDA ORB detector configured from the current model settings.
/// </summary>
/// <returns>A new <see cref="CudaORBDetector"/>; the caller owns and must dispose it.</returns>
public CudaORBDetector CreateDetector()
{
    return new CudaORBDetector(
        this.model.NumberOfFeatures,
        this.model.ScaleFactor,
        this.model.NLevels,
        this.model.EdgeThreshold,
        this.model.firstLevel,
        this.model.WTK_A,
        this.model.ScoreType,
        this.model.PatchSize,
        this.model.FastThreshold,
        this.model.BlurForDescriptor);
}
/// <summary>
/// Finds images among <paramref name="targetFiles"/> whose ORB features match those of
/// <paramref name="sourceFile"/>, using CUDA-accelerated detection and brute-force
/// Hamming matching. Only .jpg/.jpeg targets are considered.
/// </summary>
/// <param name="sourceFile">Path of the reference image.</param>
/// <param name="targetFiles">Candidate image paths to compare against.</param>
/// <param name="output">Receives human-readable match/error messages.</param>
/// <returns>Paths of the target files that matched with at least <c>MinimumMatchQuality</c>.</returns>
private string[] FindFeatureMatches(string sourceFile, string[] targetFiles, ObservableCollection<string> output)
{
    // Currently we are only interested in jpg files.
    targetFiles = targetFiles
        .Where(targetFile =>
        {
            var extension = Path.GetExtension(targetFile);
            return extension.Equals(".jpg", StringComparison.OrdinalIgnoreCase)
                || extension.Equals(".jpeg", StringComparison.OrdinalIgnoreCase);
        })
        .ToArray();

    var matchingFiles = new List<string>();
    // Guards matchingFiles and output: both are mutated from Parallel.ForEach
    // worker threads and neither List<T> nor ObservableCollection<T> is thread-safe.
    var gate = new object();

    using var sourceImage = CvInvoke.Imread(sourceFile, ImreadModes.Grayscale);
    using var sourceMat = new GpuMat();
    sourceMat.Upload(sourceImage);

    using var sourceDescriptors = new GpuMat();
    using var detector = new CudaORBDetector();
    var sourceKeyPoints = detector.Detect(sourceMat, null);
    // VectorOfKeyPoint wraps native memory — dispose it instead of leaking it.
    using (var sourceKeyPointVector = new VectorOfKeyPoint(sourceKeyPoints))
    {
        detector.Compute(sourceMat, sourceKeyPointVector, sourceDescriptors);
    }

    Parallel.ForEach(targetFiles, new ParallelOptions { MaxDegreeOfParallelism = 40 }, targetFile =>
    {
        try
        {
            if (targetFile == sourceFile)
            {
                return; // No need to match the original file.
            }

            if (new FileInfo(targetFile).Length == 0)
            {
                return; // We cannot compare empty images.
            }

            using var targetImage = CvInvoke.Imread(targetFile, ImreadModes.Grayscale);
            using var targetMat = new GpuMat();
            targetMat.Upload(targetImage);

            using var targetDescriptors = new GpuMat();
            // Each worker gets its own detector; the outer instance is not
            // documented as thread-safe, so sharing it here would be risky.
            using var detector2 = new CudaORBDetector();
            var targetKeyPoints = detector2.Detect(targetMat, null);
            using (var targetKeyPointVector = new VectorOfKeyPoint(targetKeyPoints))
            {
                detector2.Compute(targetMat, targetKeyPointVector, targetDescriptors);
            }

            // Needed to compensate for some crashes.
            // See: https://stackoverflow.com/questions/25089393/opencv-flannbasedmatcher
            if (sourceKeyPoints.Length >= 2 && targetKeyPoints.Length >= 2)
            {
                using var matches = new VectorOfVectorOfDMatch();
                using var matcher = new CudaBFMatcher(DistanceType.Hamming);
                matcher.KnnMatch(sourceDescriptors, targetDescriptors, matches, KnnMatchValue);

                // Ratio test: keep a match only when the best candidate is clearly
                // better than the runner-up.
                var goodPoints = matches.ToArrayOfArray()
                    .Where(match => match.Length > 1)
                    .Where(match => match[0].Distance < match[1].Distance * MatchDistance)
                    .ToArray();

                // Normalize by the larger key-point count so quality stays in [0, 1].
                var matchCount = Math.Max(sourceKeyPoints.Length, targetKeyPoints.Length);
                var matchQuality = (float)goodPoints.Length / matchCount;

                if (matchQuality >= MinimumMatchQuality)
                {
                    using var outputImage = new Mat();
                    using var scaledOutputImage = new Mat();
                    // Dispose the temporary native vectors passed to DrawMatches
                    // (previously leaked).
                    using var sourceKeyPointVector = new VectorOfKeyPoint(sourceKeyPoints);
                    using var targetKeyPointVector = new VectorOfKeyPoint(targetKeyPoints);
                    using var goodMatchVector = new VectorOfVectorOfDMatch(goodPoints);
                    Features2DToolbox.DrawMatches(
                        sourceImage, sourceKeyPointVector,
                        targetImage, targetKeyPointVector,
                        goodMatchVector, outputImage,
                        new Bgr(System.Drawing.Color.Yellow).MCvScalar,
                        new Bgr(System.Drawing.Color.Red).MCvScalar);
                    CvInvoke.Resize(outputImage, scaledOutputImage, System.Drawing.Size.Empty, 0.1f, 0.1f);
                    Application.Current?.Dispatcher?.Invoke(() => CvInvoke.Imshow("Match preview", scaledOutputImage));

                    var sb = new StringBuilder();
                    sb.AppendLine($"{DateTime.Now} Matching:");
                    sb.AppendLine($"Source: {sourceFile}");
                    sb.AppendLine($"Target: {targetFile}");
                    sb.Append($"Match found with quality: {matchQuality}");

                    lock (gate)
                    {
                        // Bug fix: the result list was declared but never populated,
                        // so the method always returned an empty array.
                        matchingFiles.Add(targetFile);
                        output.Add(sb.ToString());
                    }
                }
            }
        }
        catch (Exception e)
        {
            var sb = new StringBuilder();
            var exception = e.ToString().Replace(Environment.NewLine, " ");
            sb.Append($"{DateTime.Now} Unable to match file: {targetFile}: {exception}");
            lock (gate)
            {
                output.Add(sb.ToString());
            }
        }
    });

    return matchingFiles.ToArray();
}