public StopSignDetector(Image<Bgr, Byte> stopSignModel)
{
    _detector = new SURFDetector(500);
    using (Image<Gray, Byte> redMask = GetRedPixelMask(stopSignModel))
    {
        _modelKeypoints = new VectorOfKeyPoint();
        _modelDescriptors = new Mat();
        _detector.DetectAndCompute(redMask, null, _modelKeypoints, _modelDescriptors, false);
        if (_modelKeypoints.Size == 0)
            throw new Exception("No image feature has been found in the stop sign model");
    }

    _modelDescriptorMatcher = new BFMatcher(DistanceType.L2);
    _modelDescriptorMatcher.Add(_modelDescriptors);

    _octagon = new VectorOfPoint(new Point[]
    {
        new Point(1, 0), new Point(2, 0), new Point(3, 1), new Point(3, 2),
        new Point(2, 3), new Point(1, 3), new Point(0, 2), new Point(0, 1)
    });
}
public void TestSURF()
{
    SURFDetector detector = new SURFDetector(500);
    //ParamDef[] parameters = detector.GetParams();
    EmguAssert.IsTrue(TestFeature2DTracker(detector, detector), "Unable to find homography matrix");
}
public List<VectorOfKeyPoint> SURF_BruteForceMatcher(Image<Gray, byte> model, Image<Gray, byte> observed, int hessianThreshold, out SURFDetector surfCPU)
{
    surfCPU = new SURFDetector(hessianThreshold, false);
    List<VectorOfKeyPoint> keyPointsList = new List<VectorOfKeyPoint>();
    VectorOfKeyPoint modelKeyPoints;
    VectorOfKeyPoint observedKeyPoints;
    try
    {
        modelKeyPoints = surfCPU.DetectKeyPointsRaw(model, null);       // extract features from the object image
        observedKeyPoints = surfCPU.DetectKeyPointsRaw(observed, null); // extract features from the observed image
        if (modelKeyPoints.Size <= 0)
            throw new System.ArgumentException("Can't find any keypoints in your model image!");
        keyPointsList.Add(modelKeyPoints);
        keyPointsList.Add(observedKeyPoints);
    }
    catch (Exception e)
    {
        Log.WriteLine("SURF_BruteForceMatcher: " + e.Message);
        Console.WriteLine(e.Message);
        throw; // rethrow without resetting the stack trace
    }
    return keyPointsList;
}
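A minimal sketch of calling SURF_BruteForceMatcher; the file names and the hessian threshold of 500 are illustrative assumptions, not part of the original code:

SURFDetector surf;
using (Image<Gray, byte> model = new Image<Gray, byte>("model.png"))
using (Image<Gray, byte> observed = new Image<Gray, byte>("observed.png"))
{
    // keyPoints[0] holds the model keypoints, keyPoints[1] the observed ones.
    List<VectorOfKeyPoint> keyPoints = SURF_BruteForceMatcher(model, observed, 500, out surf);
    Console.WriteLine("model: " + keyPoints[0].Size + " keypoints, observed: " + keyPoints[1].Size + " keypoints");
}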
// P/Invoke binding for OpenCV's legacy cvExtractSURF; as an extern method it needs a [DllImport] attribute in the enclosing class.
public static extern void cvExtractSURF(
    IntPtr image,
    IntPtr mask,
    ref IntPtr keypoints,
    ref IntPtr descriptors,
    IntPtr storage,
    SURFDetector parameters,
    int useProvidedKeyPoints);
public StopSignDetector(Image<Bgr, Byte> stopSignModel)
{
    _detector = new SURFDetector(500, false);
    using (Image<Gray, Byte> redMask = GetRedPixelMask(stopSignModel))
    {
        ImageFeature<float>[] features = _detector.DetectFeatures(redMask, null);
        if (features.Length == 0)
            throw new Exception("No image feature has been found in the stop sign model");
        _tracker = new Features2DTracker<float>(features);
    }

    _octagonStorage = new MemStorage();
    _octagon = new Contour<Point>(_octagonStorage);
    _octagon.PushMulti(new Point[]
    {
        new Point(1, 0), new Point(2, 0), new Point(3, 1), new Point(3, 2),
        new Point(2, 3), new Point(1, 3), new Point(0, 2), new Point(0, 1)
    }, Emgu.CV.CvEnum.BACK_OR_FRONT.FRONT);
}
public void TestBOWKmeansTrainer()
{
    Image<Gray, byte> box = EmguAssert.LoadImage<Gray, byte>("box.png");
    SURFDetector detector = new SURFDetector(500);
    VectorOfKeyPoint kpts = new VectorOfKeyPoint();
    Mat descriptors = new Mat();
    detector.DetectAndCompute(box, null, kpts, descriptors, false);

    BOWKMeansTrainer trainer = new BOWKMeansTrainer(100, new MCvTermCriteria(), 3, CvEnum.KMeansInitType.PPCenters);
    trainer.Add(descriptors);
    Mat vocabulary = new Mat();
    trainer.Cluster(vocabulary);

    BFMatcher matcher = new BFMatcher(DistanceType.L2);
    BOWImgDescriptorExtractor extractor = new BOWImgDescriptorExtractor(detector, matcher);
    extractor.SetVocabulary(vocabulary);

    Mat descriptors2 = new Mat();
    extractor.Compute(box, kpts, descriptors2);
}
public SurfDetector(String model)
{
    surfCPU = new SURFDetector(500, false);
    Image<Bgr, Byte> color = new Image<Bgr, byte>(model);
    modelImage = color.Convert<Gray, Byte>();

    // extract features from the object image
    modelKeyPoints = surfCPU.DetectKeyPointsRaw(modelImage, null);
    modelDescriptors = surfCPU.ComputeDescriptorsRaw(modelImage, null, modelKeyPoints);

    matcher = new BruteForceMatcher<float>(DistanceType.L2);
    matcher.Add(modelDescriptors);

    // for computers with awesome GPUs...
    if (GpuInvoke.HasCuda)
    {
        /*
        surfGPU = new GpuSURFDetector(500, 4, 4, false, 0.01f, true);
        gpuModelImage = new GpuImage<Gray, byte>(modelImage);
        gpuModelKeyPoints = surfGPU.DetectKeyPointsRaw(gpuModelImage, null);
        gpuModelDescriptors = surfGPU.ComputeDescriptorsRaw(gpuModelImage, null, gpuModelKeyPoints);
        matcher_gpu = new GpuBruteForceMatcher<float>(DistanceType.L2);
        modelKeyPoints = new VectorOfKeyPoint();
        surfGPU.DownloadKeypoints(gpuModelKeyPoints, modelKeyPoints);
        */
    }
}
public void ProcessImage(Image<Gray, Byte> modelImage)
{
    _modelKeyPoints = new VectorOfKeyPoint();
    _modelImage = modelImage;
    SURFDetector surfCPU = new SURFDetector(500, false);
    _feature = surfCPU.DetectAndCompute(modelImage, null, _modelKeyPoints);
}
public void IndexFiles(FileInfo[] imageFiles, System.ComponentModel.BackgroundWorker IndexBgWorker,
                       Action<string> logWriter, SurfSettings surfSetting = null)
{
    #region SURF detector setup
    double hessianThresh = 500;
    double uniquenessThreshold = 0.8;
    if (surfSetting != null)
    {
        hessianThresh = surfSetting.HessianThresh.Value;
        uniquenessThreshold = surfSetting.UniquenessThreshold.Value;
    }
    SURFDetector surfDetector = new SURFDetector(hessianThresh, false);
    #endregion

    List<SURFRecord2> surfRecord2List = new List<SURFRecord2>();
    Stopwatch sw1, sw2;

    sw1 = Stopwatch.StartNew();
    logWriter("Index started...");
    int totalFileCount = imageFiles.Length;
    for (int i = 0; i < totalFileCount; i++)
    {
        var fi = imageFiles[i];
        using (Image<Gray, byte> observerImage = new Image<Gray, byte>(fi.FullName))
        {
            ImageFeature<float>[] observerFeatures = surfDetector.DetectFeatures(observerImage, null);
            if (observerFeatures.Length > 4)
            {
                SURFRecord2 record = new SURFRecord2
                {
                    Id = i,
                    ImageName = fi.Name,
                    ImagePath = fi.FullName,
                    observerFeatures = observerFeatures
                };
                surfRecord2List.Add(record);
            }
            else
            {
                Debug.WriteLine(fi.Name + " skipped from index because it didn't have enough significant features");
            }
        }
        IndexBgWorker.ReportProgress(i);
    }
    SurfRepository.AddSURFRecord2List(surfRecord2List);
    sw1.Stop();
    logWriter(string.Format("Index complete, it took {0} ms. Saving repository...", sw1.ElapsedMilliseconds));

    sw2 = Stopwatch.StartNew();
    SurfRepository.SaveRepository(SurfAlgo.Linear);
    sw2.Stop();

    logWriter(string.Format("Indexing took {0} ms. Saving the repository took {1} ms", sw1.ElapsedMilliseconds, sw2.ElapsedMilliseconds));
}
public void TestSURFBlankImage()
{
    SURFDetector detector = new SURFDetector(500);
    Image<Gray, Byte> img = new Image<Gray, byte>(1024, 900);
    VectorOfKeyPoint vp = new VectorOfKeyPoint();
    Mat descriptors = new Mat();
    detector.DetectAndCompute(img, null, vp, descriptors, false);
}
private static void FindMatch(Image<Gray, Byte> modelImage, Image<Gray, byte> observedImage, SurfSettings surfSettings,
                              out long matchTime, out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints,
                              out Matrix<int> indices, out Matrix<byte> mask, out HomographyMatrix homography)
{
    #region SURF detector setup
    double hessianThresh = 500;
    double uniquenessThreshold = 0.8;
    if (surfSettings != null)
    {
        hessianThresh = surfSettings.HessianThresh.Value;
        uniquenessThreshold = surfSettings.UniquenessThreshold.Value;
    }
    SURFDetector surfCPU = new SURFDetector(hessianThresh, false);
    #endregion

    int k = 2;
    Stopwatch watch;
    homography = null;

    // extract features from the object image
    modelKeyPoints = new VectorOfKeyPoint();
    Matrix<float> modelDescriptors = surfCPU.DetectAndCompute(modelImage, null, modelKeyPoints);

    watch = Stopwatch.StartNew();

    // extract features from the observed image
    observedKeyPoints = new VectorOfKeyPoint();
    Matrix<float> observedDescriptors = surfCPU.DetectAndCompute(observedImage, null, observedKeyPoints);

    BruteForceMatcher<float> matcher = new BruteForceMatcher<float>(DistanceType.L2);
    matcher.Add(modelDescriptors);

    indices = new Matrix<int>(observedDescriptors.Rows, k);
    using (Matrix<float> dist = new Matrix<float>(observedDescriptors.Rows, k))
    {
        matcher.KnnMatch(observedDescriptors, indices, dist, k, null);
        mask = new Matrix<byte>(dist.Rows, 1);
        mask.SetValue(255);
        Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
    }

    int nonZeroCount = CvInvoke.cvCountNonZero(mask);
    if (nonZeroCount >= 4)
    {
        nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
        if (nonZeroCount >= 4)
            homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
    }

    watch.Stop();
    matchTime = watch.ElapsedMilliseconds;
}
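A minimal sketch of how this FindMatch overload might be driven, assuming Emgu CV 2.x; the file names are illustrative, and passing null for SurfSettings falls back to the built-in defaults:

long matchTime;
VectorOfKeyPoint modelKeyPoints, observedKeyPoints;
Matrix<int> indices;
Matrix<byte> mask;
HomographyMatrix homography;
using (Image<Gray, byte> model = new Image<Gray, byte>("model.png"))
using (Image<Gray, byte> observed = new Image<Gray, byte>("observed.png"))
{
    FindMatch(model, observed, null, out matchTime, out modelKeyPoints, out observedKeyPoints,
              out indices, out mask, out homography);
    // A non-null homography means at least 4 geometrically consistent matches survived voting.
    Console.WriteLine("Matching took " + matchTime + " ms; homography " + (homography == null ? "not found" : "found"));
}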
public static Image<Bgr, Byte> DrawFeatures(Image<Gray, Byte> modelImage)
{
    SURFDetector surfCPU = new SURFDetector(500, false);
    VectorOfKeyPoint modelKeyPoints = new VectorOfKeyPoint();
    Matrix<float> modelDescriptors = surfCPU.DetectAndCompute(modelImage, null, modelKeyPoints);
    Image<Bgr, Byte> result = Features2DToolbox.DrawKeypoints(modelImage, modelKeyPoints, new Bgr(0, 0, 255),
                                                              Features2DToolbox.KeypointDrawType.DEFAULT);
    return result;
}
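A minimal usage sketch; the file names are illustrative assumptions:

using (Image<Gray, byte> input = new Image<Gray, byte>("scene.png"))
{
    // Keypoints are drawn in red (Bgr(0, 0, 255)) on a color copy of the input.
    Image<Bgr, byte> annotated = DrawFeatures(input);
    annotated.Save("scene-keypoints.png");
}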
private void CreateSurfaceTracker()
{
    surfaceParameters = new SURFDetector(500, false);
    using (Image<Bgr, Byte> stopSignModel = new Image<Bgr, Byte>(Properties.Resources.SignModel))
    using (Image<Gray, Byte> redMask = GetRedPixelMask(stopSignModel))
    {
        featureTracker = new Features2DTracker<float>(surfaceParameters.DetectFeatures(redMask, null));
    }
}
private ImageFeature<float>[] SURF(Bitmap M)
{
    Image<Gray, byte> image = new Image<Gray, byte>(M);
    SURFDetector surf = new SURFDetector(1.0f, true); // extended = true yields 128-element descriptors
    MKeyPoint[] keyPoints = surf.DetectKeyPoints(image, null);
    ImageFeature<float>[] res = surf.ComputeDescriptors(image, null, keyPoints);
    return res;
}
public Homography()
{
    InitializeComponent();
    camera = new Capture(@"F:\Working\Final phase\DataSet\20150409_13-34-33.asf");
    surfParam = new SURFDetector(500, false);

    modelImage = new Image<Rgb, byte>(@"C:\Users\ahmed nady\Pictures\modelImage.bmp");
    modelFeatures = surfParam.DetectFeatures(modelImage.Convert<Gray, byte>(), null);

    // Create a Feature Tracker
    tracker = new Features2DTracker<float>(modelFeatures);
}
public Surf()
{
    InitializeComponent();
    camera = new Capture(0);
    surfParam = new SURFDetector(500, false);

    modelImage = new Image<Rgb, byte>(@"C:\Users\ahmed nady\Desktop\hand.jpg");
    // extract features from the object image
    modelFeatures = surfParam.DetectFeatures(modelImage.Convert<Gray, byte>(), null);

    // Create a Feature Tracker
    tracker = new Features2DTracker<float>(modelFeatures);
}
public Tuple<Image<Bgr, byte>, HomographyMatrix> DrawHomography(Image<Gray, byte> model, Image<Gray, byte> observed, double uniquenessThreshold)
{
    HomographyMatrix homography = null;
    Image<Bgr, Byte> result = observed.Convert<Bgr, byte>();

    SURFDetector surfCPU = new SURFDetector(500, false);
    VectorOfKeyPoint modelKeyPoints;
    VectorOfKeyPoint observedKeyPoints;
    Matrix<int> indices;
    Matrix<byte> mask;
    int k = 2;

    modelKeyPoints = surfCPU.DetectKeyPointsRaw(model, null); // extract features from the object image
    Matrix<float> modelDescriptors = surfCPU.ComputeDescriptorsRaw(model, null, modelKeyPoints);

    observedKeyPoints = surfCPU.DetectKeyPointsRaw(observed, null); // extract features from the observed image

    if (modelKeyPoints.Size <= 0)
        throw new System.ArgumentException("Can't find any keypoints in your model image!");

    if (observedKeyPoints.Size > 0)
    {
        Matrix<float> observedDescriptors = surfCPU.ComputeDescriptorsRaw(observed, null, observedKeyPoints);

        BruteForceMatcher<float> matcher = new BruteForceMatcher<float>(DistanceType.L2);
        matcher.Add(modelDescriptors);

        indices = new Matrix<int>(observedDescriptors.Rows, k);
        using (Matrix<float> dist = new Matrix<float>(observedDescriptors.Rows, k))
        {
            matcher.KnnMatch(observedDescriptors, indices, dist, k, null);
            mask = new Matrix<byte>(dist.Rows, 1);
            mask.SetValue(255);
            Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
        }

        int nonZeroCount = CvInvoke.cvCountNonZero(mask);
        if (nonZeroCount >= 10)
        {
            nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
            if (nonZeroCount >= 10)
                homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
        }

        result = Features2DToolbox.DrawMatches(model, modelKeyPoints, observed, observedKeyPoints, indices,
                                               new Bgr(255, 255, 255), new Bgr(255, 255, 255), mask,
                                               Features2DToolbox.KeypointDrawType.DEFAULT);
    }

    return new Tuple<Image<Bgr, byte>, HomographyMatrix>(result, homography);
}
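A minimal sketch of consuming DrawHomography, assuming Emgu CV 2.x and System.Drawing types; the file names are illustrative. HomographyMatrix.ProjectPoints transforms the corner array in place:

using (Image<Gray, byte> model = new Image<Gray, byte>("model.png"))
using (Image<Gray, byte> observed = new Image<Gray, byte>("observed.png"))
{
    Tuple<Image<Bgr, byte>, HomographyMatrix> drawn = DrawHomography(model, observed, 0.8);
    if (drawn.Item2 != null)
    {
        // Outline the detected model region in the match visualization.
        Rectangle rect = model.ROI;
        PointF[] corners =
        {
            new PointF(rect.Left, rect.Top), new PointF(rect.Right, rect.Top),
            new PointF(rect.Right, rect.Bottom), new PointF(rect.Left, rect.Bottom)
        };
        drawn.Item2.ProjectPoints(corners);
        drawn.Item1.DrawPolyline(Array.ConvertAll(corners, Point.Round), true, new Bgr(0, 0, 255), 2);
    }
    drawn.Item1.Save("matches.png");
}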
// The actual matching happens here
public void FindMatch(Image<Gray, Byte> modelImage, Image<Gray, byte> observedImage, out long matchTime,
                      out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints,
                      out Matrix<int> indices, out Matrix<byte> mask, out HomographyMatrix homography)
{
    int k = 2;
    double uniquenessThreshold = 0.8;
    SURFDetector surfCPU = new SURFDetector(500, false); // configure how features are detected
    Stopwatch watch;                                     // measure processing time
    homography = null;                                   // if the model is found, this maps it to a quadrilateral

    // extract features from the object image
    modelKeyPoints = new VectorOfKeyPoint();
    Matrix<float> modelDescriptors = surfCPU.DetectAndCompute(modelImage, null, modelKeyPoints); // modelKeyPoints: detected keypoints; modelDescriptors: their descriptors

    // extract features from the observed image
    observedKeyPoints = new VectorOfKeyPoint();
    Matrix<float> observedDescriptors = surfCPU.DetectAndCompute(observedImage, null, observedKeyPoints); // observedKeyPoints: detected keypoints

    //ImagePrecess processor = new ImagePrecess(observedImage.ToBitmap(), 320, 240);
    //observedDescriptors = processor.GetImageFeature();
    //observedKeyPoints = processor.GetImageVectorOfKeyPoint();

    watch = Stopwatch.StartNew();

    BruteForceMatcher<float> matcher = new BruteForceMatcher<float>(DistanceType.L2);
    matcher.Add(modelDescriptors);

    indices = new Matrix<int>(observedDescriptors.Rows, k);
    using (Matrix<float> dist = new Matrix<float>(observedDescriptors.Rows, k))
    {
        matcher.KnnMatch(observedDescriptors, indices, dist, k, null); // find the k nearest matches
        mask = new Matrix<byte>(dist.Rows, 1);
        mask.SetValue(255);
        // Filter the fresh matches for ambiguous/uncertain ones; the result is stored in mask.
        Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
    }

    int nonZeroCount = CvInvoke.cvCountNonZero(mask);
    if (nonZeroCount >= 4)
    {
        nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
        if (nonZeroCount >= 4)
            homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
    }

    watch.Stop();
    matchTime = watch.ElapsedMilliseconds;
}
/// <summary>
/// Compute SURF feature points
/// </summary>
/// <param name="srcImage">source image</param>
/// <returns>the extracted feature data</returns>
public static SURFFeatureData CalSURFFeature(Image<Bgr, Byte> srcImage)
{
    SURFDetector surfCPU = new SURFDetector(new MCvSURFParams(1200, false)); // default is 500
    VectorOfKeyPoint keyPoints;
    Matrix<float> descriptors = null;
    using (Image<Gray, Byte> grayImg = srcImage.Convert<Gray, Byte>())
    {
        keyPoints = surfCPU.DetectKeyPointsRaw(grayImg, null);
        descriptors = surfCPU.ComputeDescriptorsRaw(grayImg, null, keyPoints);
    }
    return new SURFFeatureData(srcImage.Copy(), keyPoints, descriptors);
}
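A minimal usage sketch; the image path is an illustrative assumption, and SURFFeatureData is the surrounding project's own container type:

using (Image<Bgr, byte> img = new Image<Bgr, byte>("query.jpg"))
{
    // Returns a copy of the image together with its SURF keypoints and descriptors.
    SURFFeatureData feature = CalSURFFeature(img);
}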
Matrix<float> ComputeSingleDescriptors(string fileName)
{
    Matrix<float> descs;
    detector = new SURFDetector(surfHessianThresh, surfExtendedFlag);
    using (Image<Gray, byte> img = new Image<Gray, byte>(fileName))
    {
        VectorOfKeyPoint keyPoints = detector.DetectKeyPointsRaw(img, null);
        descs = detector.ComputeDescriptorsRaw(img, null, keyPoints);
    }
    return descs;
}
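A minimal sketch of looping this helper over a folder of images; the directory, pattern, and the null check are assumptions (a featureless image may yield no descriptors):

foreach (string file in Directory.GetFiles("images", "*.png"))
{
    Matrix<float> descriptors = ComputeSingleDescriptors(file);
    if (descriptors != null)
        Console.WriteLine(file + ": " + descriptors.Rows + " x " + descriptors.Cols + " descriptor matrix");
}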
Matrix<float> CursorComputeSingleDescriptor(ref Image<Gray, byte> source)
{
    Matrix<float> descs;
    cursorDetector = new SURFDetector(surfHessianThresh, surfExtendedFlag);
    VectorOfKeyPoint keyPoints = cursorDetector.DetectKeyPointsRaw(source, null);
    descs = cursorDetector.ComputeDescriptorsRaw(source, null, keyPoints);
    //descs = cursorDetector.DetectAndCompute(source, null, keyPoints);
    return descs;
}
public void TestGridAdaptedFeatureDetectorRepeatedRun()
{
    Image<Gray, byte> box = EmguAssert.LoadImage<Gray, byte>("box.png");
    SURFDetector surfDetector = new SURFDetector(400);
    GridAdaptedFeatureDetector detector = new GridAdaptedFeatureDetector(surfDetector, 1000, 2, 2);
    VectorOfKeyPoint kpts1 = new VectorOfKeyPoint();
    detector.DetectRaw(box, kpts1);
    VectorOfKeyPoint kpts2 = new VectorOfKeyPoint();
    detector.DetectRaw(box, kpts2);
    EmguAssert.IsTrue(kpts1.Size == kpts2.Size);
}
Matrix<float> CursorComputeSingleDescriptor()
{
    tempCursorSourceImage = new Image<Gray, byte>(cursorSourceSkin.Bitmap);
    Matrix<float> descs;
    cursorDetector = new SURFDetector(surfHessianThresh, surfExtendedFlag);
    VectorOfKeyPoint keyPoints = cursorDetector.DetectKeyPointsRaw(tempCursorSourceImage, null);
    descs = cursorDetector.ComputeDescriptorsRaw(tempCursorSourceImage, null, keyPoints);
    //descs = cursorDetector.DetectAndCompute(tempCursorSourceImage, null, keyPoints);
    return descs;
}
public SurfStopSignDetector()
{
    detector = new SURFDetector(500, false);

    Image<Bgr, Byte> stopSignModel = new Image<Bgr, Byte>("models\\stop-sign-model.png");
    Image<Gray, Byte> redMask = GetRedPixelMask(stopSignModel);
    tracker = new Features2DTracker<float>(detector.DetectFeatures(redMask, null));

    octagonStorage = new MemStorage();
    octagon = new Contour<Point>(octagonStorage);
    octagon.PushMulti(new Point[]
    {
        new Point(1, 0), new Point(2, 0), new Point(3, 1), new Point(3, 2),
        new Point(2, 3), new Point(1, 3), new Point(0, 2), new Point(0, 1)
    }, Emgu.CV.CvEnum.BACK_OR_FRONT.FRONT);
}
public List<Keypoint> usingSurf(Bitmap image)
{
    SURFDetector surf = new SURFDetector(750, false);
    Image<Gray, Byte> modelImage = new Image<Gray, byte>(new Bitmap(image));
    VectorOfKeyPoint modelKeyPoints = surf.DetectKeyPointsRaw(modelImage, null);
    MKeyPoint[] keypoints = modelKeyPoints.ToArray();

    Keypoint key;
    List<Keypoint> keypointsList = new List<Keypoint>();
    foreach (MKeyPoint keypoint in keypoints)
    {
        key = new Keypoint(keypoint.Point.X, keypoint.Point.Y, keypoint.Size);
        keypointsList.Add(key);
    }
    return keypointsList;
}
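A minimal usage sketch; the file name is illustrative and Keypoint is the surrounding project's own type:

Bitmap bmp = new Bitmap("photo.jpg");
List<Keypoint> points = usingSurf(bmp);
Console.WriteLine("Detected " + points.Count + " SURF keypoints");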
/// <summary>
/// Compute SURF feature points
/// </summary>
/// <param name="srcImage">source image</param>
/// <returns>the extracted feature data</returns>
public static SURFFeatureData CalSURFFeature(Image<Bgr, Byte> srcImage)
{
    SURFDetector surfCPU = new SURFDetector(new MCvSURFParams(1200, false)); // default is 500
    VectorOfKeyPoint keyPoints;
    Matrix<float> descriptors = null;
    Stopwatch watch;
    watch = Stopwatch.StartNew();
    using (Image<Gray, Byte> grayImg = srcImage.Convert<Gray, Byte>())
    {
        keyPoints = surfCPU.DetectKeyPointsRaw(grayImg, null);
        descriptors = surfCPU.ComputeDescriptorsRaw(grayImg, null, keyPoints);
    }
    watch.Stop();
    Console.WriteLine("\nExtract SURF time=> " + watch.ElapsedMilliseconds.ToString() + "ms");
    // number of extracted feature points
    Console.WriteLine("keypoint size:" + keyPoints.Size);
    return new SURFFeatureData(srcImage.Copy(), keyPoints, descriptors);
}
private static bool IsModelInObserved(Image<Gray, byte> modelImage, Image<Gray, byte> observedImage, double similarityThreshold = 0.075)
{
    var surfCpu = new SURFDetector(500, false);
    Matrix<byte> mask;
    int k = 2;
    double uniquenessThreshold = 0.8;

    // extract features from the object image
    var modelKeyPoints = surfCpu.DetectKeyPointsRaw(modelImage, null);
    Matrix<float> modelDescriptors = surfCpu.ComputeDescriptorsRaw(modelImage, null, modelKeyPoints);

    // extract features from the observed image
    var observedKeyPoints = surfCpu.DetectKeyPointsRaw(observedImage, null);
    Matrix<float> observedDescriptors = surfCpu.ComputeDescriptorsRaw(observedImage, null, observedKeyPoints);

    BruteForceMatcher<float> matcher = new BruteForceMatcher<float>(DistanceType.L2);
    matcher.Add(modelDescriptors);

    var indices = new Matrix<int>(observedDescriptors.Rows, k);
    using (var dist = new Matrix<float>(observedDescriptors.Rows, k))
    {
        matcher.KnnMatch(observedDescriptors, indices, dist, k, null);
        mask = new Matrix<byte>(dist.Rows, 1);
        mask.SetValue(255);
        Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
    }

    int keypointMatchCount = CvInvoke.cvCountNonZero(mask);
    if (keypointMatchCount >= 4)
    {
        keypointMatchCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
        if (keypointMatchCount >= 4)
            Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
    }

    var similarity = (double)keypointMatchCount / observedKeyPoints.Size;
    return similarity > similarityThreshold;
}
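A minimal usage sketch with the default similarity threshold; the file names are illustrative assumptions:

using (var logo = new Image<Gray, byte>("logo.png"))
using (var frame = new Image<Gray, byte>("frame.png"))
{
    bool found = IsModelInObserved(logo, frame);
    Console.WriteLine(found ? "Model detected in observed image" : "Model not found");
}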
private void SURFfeature(Image<Gray, Byte> modelImage, Image<Gray, byte> observedImage,
                         out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints,
                         out Matrix<int> indices, out Matrix<byte> mask, out HomographyMatrix homography)
{
    int k = 2;
    double uniquenessThreshold = 0.8;
    SURFDetector surfCPU = new SURFDetector(300, false);
    homography = null;

    // extract features from the object image
    modelKeyPoints = new VectorOfKeyPoint();
    Matrix<float> modelDescriptors = surfCPU.DetectAndCompute(modelImage, null, modelKeyPoints);

    // extract features from the observed image
    observedKeyPoints = new VectorOfKeyPoint();
    Matrix<float> observedDescriptors = surfCPU.DetectAndCompute(observedImage, null, observedKeyPoints);

    BruteForceMatcher<float> matcher = new BruteForceMatcher<float>(DistanceType.L2);
    matcher.Add(modelDescriptors);

    indices = new Matrix<int>(observedDescriptors.Rows, k);
    using (Matrix<float> dist = new Matrix<float>(observedDescriptors.Rows, k))
    {
        matcher.KnnMatch(observedDescriptors, indices, dist, k, null);
        mask = new Matrix<byte>(dist.Rows, 1);
        mask.SetValue(255);
        Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
    }

    int nonZeroCount = CvInvoke.cvCountNonZero(mask);
    if (nonZeroCount >= 4)
    {
        nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
        if (nonZeroCount >= 4)
            homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
    }
}
public void TestDetectorColor()
{
    Image<Bgr, byte> box = EmguAssert.LoadImage<Bgr, byte>("box.png");
    Image<Gray, byte> gray = box.Convert<Gray, Byte>();

    SURFDetector surf = new SURFDetector(400);
    OpponentColorDescriptorExtractor opponentSurf = new OpponentColorDescriptorExtractor(surf);

    SIFTDetector sift = new SIFTDetector();
    OpponentColorDescriptorExtractor opponentSift = new OpponentColorDescriptorExtractor(sift);

    //using (Util.VectorOfKeyPoint kpts = surf.DetectKeyPointsRaw(gray, null))
    using (Util.VectorOfKeyPoint kpts = new VectorOfKeyPoint())
    {
        sift.DetectRaw(gray, kpts);
        for (int i = 1; i < 2; i++)
        {
            using (Mat surfDescriptors = new Mat())
            {
                opponentSurf.Compute(box, kpts, surfDescriptors);
                //EmguAssert.IsTrue(surfDescriptors.Width == (surf.SURFParams.Extended == 0 ? 64 : 128) * 3);
            }

            //TODO: Find out why the following test fails
            using (Mat siftDescriptors = new Mat())
            {
                sift.Compute(gray, kpts, siftDescriptors);
                EmguAssert.IsTrue(siftDescriptors.Cols == sift.GetDescriptorSize());
            }

            int siftDescriptorSize = sift.GetDescriptorSize();
            using (Mat siftDescriptors = new Mat())
            {
                opponentSift.Compute(box, kpts, siftDescriptors);
                EmguAssert.IsTrue(siftDescriptors.Cols == siftDescriptorSize * 3);
            }
        }
    }
}
public StopSignDetector(Image<Bgr, Byte> stopSignModel)
{
    _detector = new SURFDetector(500, false);
    using (Image<Gray, Byte> redMask = GetRedPixelMask(stopSignModel))
    {
        ImageFeature<float>[] temp = _detector.DetectFeatures(redMask, null);
        _tracker = new Features2DTracker<float>(temp);
    }

    _octagonStorage = new MemStorage();
    _octagon = new Contour<Point>(_octagonStorage);
    _octagon.PushMulti(new Point[]
    {
        new Point(1, 0), new Point(2, 0), new Point(3, 1), new Point(3, 2),
        new Point(2, 3), new Point(1, 3), new Point(0, 2), new Point(0, 1)
    }, Emgu.CV.CvEnum.BACK_OR_FRONT.FRONT);
}