public void TestMSER()
{
   MSERDetector keyPointDetector = new MSERDetector();
   SIFTDetector descriptorGenerator = new SIFTDetector();
   //ParamDef[] parameters = keyPointDetector.GetParams();
   TestFeature2DTracker(keyPointDetector, descriptorGenerator);
}
public void TestGFTTDetector()
{
   GFTTDetector keyPointDetector = new GFTTDetector(1000, 0.01, 1, 3, false, 0.04);
   SIFTDetector descriptorGenerator = new SIFTDetector();
   ParamDef[] parameters = keyPointDetector.GetParams();
   TestFeature2DTracker(keyPointDetector, descriptorGenerator);
}
private ImageFeature<float>[] SIFFt(Bitmap M)
{
   Image<Gray, byte> image = new Image<Gray, byte>(M);
   SIFTDetector sift = new SIFTDetector();
   //Detect the keypoints, then compute a SIFT descriptor for each of them
   MKeyPoint[] key = sift.DetectKeyPoints(image, null);
   ImageFeature<float>[] res = sift.ComputeDescriptors(image, null, key);
   return res;
}
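//A minimal usage sketch for the SIFFt helper above. "model.png" is a hypothetical
//file name; this only illustrates the call pattern, not a tested workflow.
private void PrintSiftCount()
{
   Bitmap bmp = new Bitmap("model.png");
   ImageFeature<float>[] features = SIFFt(bmp);
   Console.WriteLine("Extracted {0} SIFT descriptors", features.Length);
}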
public void TestStar()
{
   StarDetector keyPointDetector = new StarDetector();
   //SURFDetector descriptorGenerator = new SURFDetector(500, false);
   SIFTDetector descriptorGenerator = new SIFTDetector();
   //ParamDef[] parameters = keyPointDetector.GetParams();
   TestFeature2DTracker(keyPointDetector, descriptorGenerator);
}
public List<Keypoint> usingSift(Bitmap image)
{
   SIFTDetector sift = new SIFTDetector();
   Image<Gray, Byte> modelImage = new Image<Gray, byte>(new Bitmap(image));
   VectorOfKeyPoint modelKeyPoints = sift.DetectKeyPointsRaw(modelImage, null);
   MKeyPoint[] keypoints = modelKeyPoints.ToArray();
   List<Keypoint> keypointsList = new List<Keypoint>();
   foreach (MKeyPoint keypoint in keypoints)
   {
      keypointsList.Add(new Keypoint(keypoint.Point.X, keypoint.Point.Y, keypoint.Size));
   }
   return keypointsList;
}
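//The example above assumes a small Keypoint DTO that is not part of Emgu CV.
//A minimal sketch of what such a class might look like (hypothetical, for illustration only):
public class Keypoint
{
   public float X { get; private set; }
   public float Y { get; private set; }
   public float Size { get; private set; }

   public Keypoint(float x, float y, float size)
   {
      X = x;
      Y = y;
      Size = size;
   }
}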
public void TestDetectorColor()
{
   Image<Bgr, byte> box = EmguAssert.LoadImage<Bgr, byte>("box.png");
   Image<Gray, byte> gray = box.Convert<Gray, Byte>();

   SURFDetector surf = new SURFDetector(400);
   OpponentColorDescriptorExtractor opponentSurf = new OpponentColorDescriptorExtractor(surf);
   SIFTDetector sift = new SIFTDetector();
   OpponentColorDescriptorExtractor opponentSift = new OpponentColorDescriptorExtractor(sift);

   //using (Util.VectorOfKeyPoint kpts = surf.DetectKeyPointsRaw(gray, null))
   using (Util.VectorOfKeyPoint kpts = new VectorOfKeyPoint())
   {
      sift.DetectRaw(gray, kpts);
      for (int i = 1; i < 2; i++)
      {
         using (Mat surfDescriptors = new Mat())
         {
            opponentSurf.Compute(box, kpts, surfDescriptors);
            //EmguAssert.IsTrue(surfDescriptors.Width == (surf.SURFParams.Extended == 0 ? 64 : 128) * 3);
         }

         //TODO: Find out why the following test fails
         using (Mat siftDescriptors = new Mat())
         {
            sift.Compute(gray, kpts, siftDescriptors);
            EmguAssert.IsTrue(siftDescriptors.Cols == sift.GetDescriptorSize());
         }

         //An opponent-color descriptor is computed on three opponent color channels,
         //so its size is three times that of the underlying descriptor.
         int siftDescriptorSize = sift.GetDescriptorSize();
         using (Mat siftDescriptors = new Mat())
         {
            opponentSift.Compute(box, kpts, siftDescriptors);
            EmguAssert.IsTrue(siftDescriptors.Cols == siftDescriptorSize * 3);
         }
      }
   }
}
//public static string DoImageRegonition(string FullFeaFName, Stream ImgStream)
public static Dictionary<string, string> DoImageRegonition(string[] SevenFeas, string[] CocaFeas, Stream ImgStream)
{
   Dictionary<string, string> regres = new Dictionary<string, string>(4);
   StringBuilder sb = new StringBuilder();
   const int Seven_DV = 400;
   const int Coca_DV = 300;
   try
   {
      SIFTDetector siftdector = new SIFTDetector();
      //Loading the model image here is unnecessary because its features were extracted
      //beforehand; otherwise it would first have to be converted to gray-scale:
      //Image<Gray, Byte> modelImage = new Image<Gray, byte>(FullMoldeImgName);
      BinaryFormatter _bf = new BinaryFormatter();
      int sevenlen = SevenFeas.Length;
      int cocalen = CocaFeas.Length;

      //Initialize the result dictionary
      regres.Add("seven", "no");
      regres.Add("coca", "no");
      regres.Add("ma", "none");
      regres.Add("excep", "none");

      //Initialize the image to be compared and extract its SIFT features
      Image<Gray, Byte> observedImage = GetCVImage(ImgStream);
      MKeyPoint[] objmkps = siftdector.DetectKeyPoints(observedImage);
      ImageFeature[] imageFeatures = siftdector.ComputeDescriptors(observedImage, objmkps);
      //PointF[] _obimgPA = GetPointFfromFeatures(imageFeatures, imageFeatures.Length);

      //Match against the "seven" feature files
      for (int idx = 0; idx < sevenlen; idx++)
      {
         //Open the feature file and deserialize it to recover the features
         Stream stream = File.Open(SevenFeas[idx], FileMode.Open);
         ImageFeature[] sevenFeatures = (ImageFeature[])_bf.Deserialize(stream);
         stream.Dispose();

         //Set up the tracker and perform the matching
         Features2DTracker seventrac = new Features2DTracker(sevenFeatures);
         Features2DTracker.MatchedImageFeature[] sevenmatchedfea = seventrac.MatchFeature(imageFeatures, 2, 20);
         sevenmatchedfea = Features2DTracker.VoteForUniqueness(sevenmatchedfea, 0.8);
         sevenmatchedfea = Features2DTracker.VoteForSizeAndOrientation(sevenmatchedfea, 1.5, 20);

         //Get the homography matrix from the matched features
         HomographyMatrix sevenhomography = Features2DTracker.GetHomographyMatrixFromMatchedFeatures(sevenmatchedfea);
         //Matrix<float> sevenhomography = CameraCalibration.FindHomography(sevenPA, _obimgPA, HOMOGRAPHY_METHOD.RANSAC, 3).Convert<float>();

         //Fill the result into the dictionary
         if (sevenhomography != null)
         {
            if (Math.Abs(sevenhomography.Sum) > Seven_DV)
               regres["seven"] = "yes";
            sb.Append("ssum:");
            sb.Append(sevenhomography.Sum.ToString());
            break;
         }
      }

      //Match against the "coca" feature files
      for (int idx2 = 0; idx2 < cocalen; idx2++)
      {
         //Open the feature file and deserialize it to recover the features
         Stream stream = File.Open(CocaFeas[idx2], FileMode.Open);
         ImageFeature[] cocaFeatures = (ImageFeature[])_bf.Deserialize(stream);
         stream.Dispose();

         //Set up the tracker and perform the matching
         Features2DTracker cocatrac = new Features2DTracker(cocaFeatures);
         Features2DTracker.MatchedImageFeature[] cocamatchedfea = cocatrac.MatchFeature(imageFeatures, 2, 20);
         cocamatchedfea = Features2DTracker.VoteForUniqueness(cocamatchedfea, 0.8);
         cocamatchedfea = Features2DTracker.VoteForSizeAndOrientation(cocamatchedfea, 1.5, 20);

         //Get the homography matrix from the matched features
         HomographyMatrix cocahomography = Features2DTracker.GetHomographyMatrixFromMatchedFeatures(cocamatchedfea);

         //Fill the result into the dictionary
         if (cocahomography != null)
         {
            if (Math.Abs(cocahomography.Sum) > Coca_DV)
               regres["coca"] = "yes";
            sb.Append("#csum:");
            sb.Append(cocahomography.Sum.ToString());
            break;
         }
      }
   }
   catch (Exception err)
   {
      regres["excep"] = err.Message;
      Console.WriteLine(err.Message);
   }

   if (sb.Length > 0)
   {
      regres["ma"] = sb.ToString();
      sb = null;
   }
   return regres;
}
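//A usage sketch for DoImageRegonition above. The paths are hypothetical; the feature
//files are assumed to be BinaryFormatter-serialized ImageFeature[] arrays, as consumed above.
public static void RunRecognition()
{
   string[] sevenFeas = { @"features\seven_0.fea" };
   string[] cocaFeas = { @"features\coca_0.fea" };
   using (FileStream img = File.OpenRead(@"images\observed.jpg"))
   {
      Dictionary<string, string> res = DoImageRegonition(sevenFeas, cocaFeas, img);
      Console.WriteLine("seven: {0}, coca: {1}, ma: {2}, excep: {3}",
         res["seven"], res["coca"], res["ma"], res["excep"]);
   }
}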
public static Image<Bgr, Byte> SIFT(Image<Gray, Byte> modelImage, Image<Gray, byte> observedImage)
{
   bool isFound = false;
   long matchTime;
   Stopwatch watch;
   HomographyMatrix homography = null;

   SIFTDetector siftCPU = new SIFTDetector();
   VectorOfKeyPoint modelKeyPoints;
   VectorOfKeyPoint observedKeyPoints;
   Matrix<int> indices;
   Matrix<byte> mask;
   int k = 2;
   double uniquenessThreshold = 0.8;

   watch = Stopwatch.StartNew();

   //extract features from the model image
   modelKeyPoints = siftCPU.DetectKeyPointsRaw(modelImage, null);
   Matrix<float> modelDescriptors = siftCPU.ComputeDescriptorsRaw(modelImage, null, modelKeyPoints);

   //extract features from the observed image
   observedKeyPoints = siftCPU.DetectKeyPointsRaw(observedImage, null);
   Matrix<float> observedDescriptors = siftCPU.ComputeDescriptorsRaw(observedImage, null, observedKeyPoints);

   BruteForceMatcher<float> matcher = new BruteForceMatcher<float>(DistanceType.L2);
   matcher.Add(modelDescriptors);

   indices = new Matrix<int>(observedDescriptors.Rows, k);
   using (Matrix<float> dist = new Matrix<float>(observedDescriptors.Rows, k))
   {
      matcher.KnnMatch(observedDescriptors, indices, dist, k, null);
      mask = new Matrix<byte>(dist.Rows, 1);
      mask.SetValue(255);
      Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
   }

   int nonZeroCount = CvInvoke.cvCountNonZero(mask);
   if (nonZeroCount >= 4)
   {
      nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
      if (nonZeroCount >= 4)
         homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
   }
   watch.Stop();

   //Draw the matched keypoints
   Image<Bgr, Byte> result = Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints,
      indices, new Bgr(255, 255, 255), new Bgr(255, 255, 255), mask, Features2DToolbox.KeypointDrawType.DEFAULT);

   #region draw the projected region on the image
   if (homography != null)
   {
      //draw a rectangle along the projected model
      Rectangle rect = modelImage.ROI;
      PointF[] pts = new PointF[]
      {
         new PointF(rect.Left, rect.Bottom),
         new PointF(rect.Right, rect.Bottom),
         new PointF(rect.Right, rect.Top),
         new PointF(rect.Left, rect.Top)
      };
      homography.ProjectPoints(pts);

      if (CvInvoke.cvCountNonZero(mask) >= 10)
         isFound = true;

      result.DrawPolyline(Array.ConvertAll<PointF, Point>(pts, Point.Round), true, new Bgr(Color.LightGreen), 5);
   }
   #endregion

   matchTime = watch.ElapsedMilliseconds;

   _richTextBox1.Clear();
   _richTextBox1.AppendText("object found: " + isFound + "\n");
   _richTextBox1.AppendText("SIFT detection time: " + matchTime + "ms\n");
   _richTextBox1.AppendText("raw features detected on the model: " + modelKeyPoints.Size + "\n");
   _richTextBox1.AppendText("matches found: " + CvInvoke.cvCountNonZero(mask).ToString());

   return result;
}
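//A usage sketch for the SIFT method above. The file names are hypothetical,
//ImageViewer comes from Emgu.CV.UI, and the method assumes the form's
//_richTextBox1 field has been initialized before the call.
public static void ShowSiftMatch()
{
   using (Image<Gray, Byte> model = new Image<Gray, Byte>("box.png"))
   using (Image<Gray, Byte> observed = new Image<Gray, Byte>("box_in_scene.png"))
   using (Image<Bgr, Byte> result = SIFT(model, observed))
   {
      ImageViewer.Show(result, "SIFT match");
   }
}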
public void TestSIFT()
{
   SIFTDetector detector = new SIFTDetector();
   EmguAssert.IsTrue(TestFeature2DTracker(detector, detector), "Unable to find homography matrix");
}
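//TestFeature2DTracker belongs to the surrounding test fixture and is not shown here.
//Below is a hypothetical, simplified sketch of such a check, built only from the calls
//used elsewhere on this page (the real harness also warps the image and verifies the
//recovered homography against the known transform):
private static bool HomographyFound(SIFTDetector detector, Image<Gray, Byte> model, Image<Gray, Byte> observed)
{
   //detect keypoints and compute descriptors for both images
   VectorOfKeyPoint modelKp = detector.DetectKeyPointsRaw(model, null);
   Matrix<float> modelDesc = detector.ComputeDescriptorsRaw(model, null, modelKp);
   VectorOfKeyPoint obsKp = detector.DetectKeyPointsRaw(observed, null);
   Matrix<float> obsDesc = detector.ComputeDescriptorsRaw(observed, null, obsKp);

   using (BruteForceMatcher<float> matcher = new BruteForceMatcher<float>(DistanceType.L2))
   using (Matrix<int> indices = new Matrix<int>(obsDesc.Rows, 2))
   using (Matrix<float> dist = new Matrix<float>(obsDesc.Rows, 2))
   using (Matrix<byte> mask = new Matrix<byte>(dist.Rows, 1))
   {
      //2-NN matching followed by the ratio test (uniqueness vote)
      matcher.Add(modelDesc);
      matcher.KnnMatch(obsDesc, indices, dist, 2, null);
      mask.SetValue(255);
      Features2DToolbox.VoteForUniqueness(dist, 0.8, mask);

      //a homography needs at least 4 surviving correspondences
      if (CvInvoke.cvCountNonZero(mask) < 4)
         return false;
      return Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKp, obsKp, indices, mask, 2) != null;
   }
}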