/// <summary>
/// Detects SIFT keypoints in the given bitmap and converts them to the
/// application's lightweight <see cref="Keypoint"/> representation (x, y, size).
/// </summary>
/// <param name="image">Source image to scan for features.</param>
/// <returns>One <see cref="Keypoint"/> per detected SIFT feature; empty list when none are found.</returns>
public List<Keypoint> usingSift(Bitmap image)
{
    // SIFTDetector, Image<,> and VectorOfKeyPoint wrap native OpenCV memory;
    // scope them with 'using' instead of leaking them until finalization
    // (the original disposed none of them).
    using (SIFTDetector sift = new SIFTDetector())
    // Image<Gray, byte> converts the Bitmap itself; the original's extra
    // 'new Bitmap(image)' copy was redundant and was itself never disposed.
    using (Image<Gray, Byte> modelImage = new Image<Gray, byte>(image))
    using (VectorOfKeyPoint modelKeyPoints = sift.DetectKeyPointsRaw(modelImage, null))
    {
        List<Keypoint> keypointsList = new List<Keypoint>();
        foreach (MKeyPoint keypoint in modelKeyPoints.ToArray())
        {
            keypointsList.Add(new Keypoint(keypoint.Point.X, keypoint.Point.Y, keypoint.Size));
        }
        return keypointsList;
    }
}
/// <summary>
/// Decides whether the model image appears in the observed image using SIFT:
/// detect keypoints in both images, brute-force KNN match (k = 2), filter with
/// a uniqueness ratio test, vote on size/orientation, then try to estimate a
/// homography from the surviving matches.
/// </summary>
/// <param name="modelImage">Grayscale template to look for.</param>
/// <param name="observedImage">Grayscale scene that may contain the template.</param>
/// <returns>true when a homography was found and at least 10 matches survived filtering.</returns>
public bool testSIFT(Image<Gray, Byte> modelImage, Image<Gray, byte> observedImage)
{
    int k = 2;                          // nearest neighbours per descriptor
    double uniquenessThreshold = 0.8;   // Lowe-style ratio-test cutoff

    // Every object below wraps native OpenCV memory; the original leaked all
    // of them, so they are now scoped with 'using'.
    using (SIFTDetector siftCPU = new SIFTDetector())
    using (VectorOfKeyPoint modelKeyPoints = siftCPU.DetectKeyPointsRaw(modelImage, null))
    using (Matrix<float> modelDescriptors = siftCPU.ComputeDescriptorsRaw(modelImage, null, modelKeyPoints))
    using (VectorOfKeyPoint observedKeyPoints = siftCPU.DetectKeyPointsRaw(observedImage, null))
    using (Matrix<float> observedDescriptors = siftCPU.ComputeDescriptorsRaw(observedImage, null, observedKeyPoints))
    using (BruteForceMatcher<float> matcher = new BruteForceMatcher<float>(DistanceType.L2))
    using (Matrix<int> indices = new Matrix<int>(observedDescriptors.Rows, k))
    using (Matrix<byte> mask = new Matrix<byte>(observedDescriptors.Rows, 1))
    {
        matcher.Add(modelDescriptors);
        using (Matrix<float> dist = new Matrix<float>(observedDescriptors.Rows, k))
        {
            matcher.KnnMatch(observedDescriptors, indices, dist, k, null);
            mask.SetValue(255); // start with every match enabled
            Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
        }

        HomographyMatrix homography = null;
        int nonZeroCount = CvInvoke.cvCountNonZero(mask);
        if (nonZeroCount >= 4)
        {
            nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
            if (nonZeroCount >= 4)
                homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
        }

        // NOTE: the original built a DrawMatches visualization here, drew the
        // projected model outline on it, and then discarded the image (never
        // displayed, returned, or disposed). That was dead work plus a native
        // memory leak, so it has been removed; the return value depends only
        // on the homography and the mask count, which are preserved.
        bool isFound = false;
        if (homography != null)
        {
            if (CvInvoke.cvCountNonZero(mask) >= 10)
                isFound = true;
            homography.Dispose();
        }
        return isFound;
    }
}
/// <summary>
/// Start matching period.
/// Based on "Automatic Panoramic Image Stitching using Invariant Features",
/// M. Brown et al., IJCV 2007.
/// Extracts SIFT features from every loaded image, brute-force matches each
/// image against its right-hand neighbour with a 2-NN ratio test, then filters
/// the raw pairs with RANSAC and stores the surviving homographies in hMatList.
/// Images are assumed to be in horizontal order.
/// </summary>
/// <param name="sender">Event source (unused).</param>
/// <param name="e">Event data (unused).</param>
private void buttonImageMatch_Click(object sender, EventArgs e)
{
    if (imgStitchList.Count < 2)
        return; // nothing to match with fewer than two images

    // Calculate SIFT features: get SIFT from srcImg and dstImg.
    SIFTDetector sift = new SIFTDetector();
    List<ImageFeature<float>[]> features = new List<ImageFeature<float>[]>();
    for (var i = 0; i < imgStitchList.Count; ++i)
    {
        Image<Gray, byte> grayImg = new Image<Gray, byte>(imgStitchList[i].ToBitmap());
        features.Add(sift.DetectFeatures(grayImg, null));
        Console.WriteLine(String.Format("Img No.{0}, Features: {1}", i, features[i].Length));
    }

    // Get matching pictures <dstId, srcId>, then generate an order.
    hMatList.Clear();
    int sumWidth = 0;
    for (var dstId = 0; dstId + 1 < imgStitchList.Count; ++dstId)
    {
        int srcId = dstId + 1;
        ImageFeature<float>[] dstFeat = features[dstId];
        ImageFeature<float>[] srcFeat = features[srcId];

        // BUGFIX: the 2-NN ratio test below unconditionally reads dist[0] and
        // dist[1]; when a neighbour image yielded fewer than two SIFT features
        // the original handler threw IndexOutOfRangeException. Skip the pair.
        if (srcFeat.Length < 2)
        {
            Console.WriteLine(String.Format("Dst:[{0}] <-> Src:[{1}], skipped: too few features.", dstId, srcId));
            sumWidth += imgStitchList[dstId].Width;
            continue;
        }

        int matchCount = 0;
        List<PointF> dstPntList = new List<PointF>();
        List<PointF> srcPntList = new List<PointF>();
        for (var i = 0; i < dstFeat.Length; ++i)
        {
            // Calculate distance: brute force over every source descriptor.
            double[] dist = new double[srcFeat.Length];
            for (var j = 0; j < srcFeat.Length; ++j)
            {
                dist[j] = getEuclidDistance(dstFeat[i].Descriptor, srcFeat[j].Descriptor);
            }

            // Find the two smallest distances (candidates for the ratio test).
            int minIndex;
            double minDist, minDist2;
            if (dist[0] < dist[1])
            {
                minIndex = 0; minDist = dist[0]; minDist2 = dist[1];
            }
            else
            {
                minIndex = 1; minDist = dist[1]; minDist2 = dist[0];
            }
            for (var j = 2; j < srcFeat.Length; ++j)
            {
                if (dist[j] < minDist)
                {
                    minIndex = j;
                    minDist2 = minDist;
                    minDist = dist[j];
                }
                else if (dist[j] < minDist2)
                {
                    minDist2 = dist[j];
                }
            }

            // Accept the pair only when the best match is distinctly better
            // than the runner-up (distance ratio below the threshold).
            if (minDist / minDist2 < SIFT_FEAT_DIST_THRESHOLD)
            {
                PointF pi = dstFeat[i].KeyPoint.Point;
                PointF pj = srcFeat[minIndex].KeyPoint.Point;
                matchGraph.DrawLine(matchPen,
                    new PointF(pi.X + sumWidth, pi.Y),
                    new PointF(pj.X + sumWidth + imgStitchList[dstId].Width, pj.Y));
                dstPntList.Add(pi);
                srcPntList.Add(pj);
                ++matchCount;
            }
        }
        Console.WriteLine(String.Format("Dst:[{0}] <-> Src:[{1}], {2} matching pairs.", dstId, srcId, matchCount));

        if (matchCount >= RANSAC_INIT_SET_SIZE)
        {
            // Use RANSAC (Random Sample Consensus) as a filter.
            HomographyMatrix hMat = CameraCalibration.FindHomography(
                srcPntList.ToArray(), dstPntList.ToArray(),
                Emgu.CV.CvEnum.HOMOGRAPHY_METHOD.RANSAC, RANSAC_REPROJ_THRESHOLD);
            if (hMat != null)
            {
                // Dump the 3x3 homography for debugging.
                for (var r = 0; r < 3; ++r)
                {
                    for (var c = 0; c < 3; ++c)
                        Console.Write(hMat[r, c] + ", ");
                    Console.Write("\n");
                }
                hMatList.Add(hMat);

                // Show inliers: re-project each source point through hMat and
                // keep those within the reprojection threshold.
                for (var j = 0; j < srcPntList.Count; ++j)
                {
                    Matrix<double> srcPntMat = new Matrix<double>(3, 1);
                    srcPntMat[0, 0] = srcPntList[j].X;
                    srcPntMat[1, 0] = srcPntList[j].Y;
                    srcPntMat[2, 0] = 1;
                    Matrix<double> dstProjMat = hMat.Mul(srcPntMat);
                    // Homogeneous coordinates: divide x and y by w.
                    double dstProjX = dstProjMat[0, 0] / dstProjMat[2, 0];
                    double dstProjY = dstProjMat[1, 0] / dstProjMat[2, 0];
                    double dist = Math.Sqrt(Math.Pow(dstProjX - dstPntList[j].X, 2) + Math.Pow(dstProjY - dstPntList[j].Y, 2));
                    if (dist <= RANSAC_REPROJ_THRESHOLD)
                    {
                        Console.WriteLine(String.Format("Valid point pair({0},{1}) <-> ({2},{3})", srcPntMat[0, 0], srcPntMat[1, 0], dstProjX, dstProjY));
                        matchGraph.DrawLine(inlierPen,
                            new Point((int)dstProjX + sumWidth, (int)dstProjY),
                            new Point((int)srcPntList[j].X + sumWidth + imgStitchList[dstId].Width, (int)srcPntList[j].Y));
                    }
                    Console.WriteLine("No." + j + " distance: " + dist);
                }
            }
        }
        sumWidth += imgStitchList[dstId].Width;
    }
}
/// <summary>
/// Runs SIFT-based object detection: matches modelImage against observedImage,
/// estimates a homography from the filtered matches, draws the matched
/// keypoints (and, when found, the projected model outline) and reports
/// detection statistics to _richTextBox1.
/// </summary>
/// <param name="modelImage">Grayscale template to look for.</param>
/// <param name="observedImage">Grayscale scene that may contain the template.</param>
/// <returns>Visualization image of the matched keypoints; caller owns (and should dispose) it.</returns>
public static Image<Bgr, Byte> SIFT(Image<Gray, Byte> modelImage, Image<Gray, byte> observedImage)
{
    bool isFound = false;
    int k = 2;                          // nearest neighbours per descriptor
    double uniquenessThreshold = 0.8;   // ratio-test cutoff
    HomographyMatrix homography = null;

    Stopwatch watch = Stopwatch.StartNew();

    // All of these wrap native OpenCV memory; the original disposed none of
    // them, so they are now scoped with 'using'. Only 'result' escapes.
    using (SIFTDetector siftCPU = new SIFTDetector())
    using (VectorOfKeyPoint modelKeyPoints = siftCPU.DetectKeyPointsRaw(modelImage, null))
    using (Matrix<float> modelDescriptors = siftCPU.ComputeDescriptorsRaw(modelImage, null, modelKeyPoints))
    using (VectorOfKeyPoint observedKeyPoints = siftCPU.DetectKeyPointsRaw(observedImage, null))
    using (Matrix<float> observedDescriptors = siftCPU.ComputeDescriptorsRaw(observedImage, null, observedKeyPoints))
    using (BruteForceMatcher<float> matcher = new BruteForceMatcher<float>(DistanceType.L2))
    using (Matrix<int> indices = new Matrix<int>(observedDescriptors.Rows, k))
    using (Matrix<byte> mask = new Matrix<byte>(observedDescriptors.Rows, 1))
    {
        matcher.Add(modelDescriptors);
        using (Matrix<float> dist = new Matrix<float>(observedDescriptors.Rows, k))
        {
            matcher.KnnMatch(observedDescriptors, indices, dist, k, null);
            mask.SetValue(255); // start with every match enabled
            Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
        }

        int nonZeroCount = CvInvoke.cvCountNonZero(mask);
        if (nonZeroCount >= 4)
        {
            nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
            if (nonZeroCount >= 4)
                homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 2);
        }
        watch.Stop();

        // Draw the matched keypoints. This image is the return value, so it is
        // intentionally NOT disposed here.
        Image<Bgr, Byte> result = Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints, indices, new Bgr(255, 255, 255), new Bgr(255, 255, 255), mask, Features2DToolbox.KeypointDrawType.DEFAULT);

        #region draw the projected region on the image
        if (homography != null)
        {
            // Draw a rectangle along the projected model.
            Rectangle rect = modelImage.ROI;
            PointF[] pts = new PointF[]
            {
                new PointF(rect.Left, rect.Bottom),
                new PointF(rect.Right, rect.Bottom),
                new PointF(rect.Right, rect.Top),
                new PointF(rect.Left, rect.Top)
            };
            homography.ProjectPoints(pts);
            if (CvInvoke.cvCountNonZero(mask) >= 10)
                isFound = true;
            result.DrawPolyline(Array.ConvertAll<PointF, Point>(pts, Point.Round), true, new Bgr(Color.LightGreen), 5);
            homography.Dispose();
        }
        #endregion

        long matchTime = watch.ElapsedMilliseconds;
        _richTextBox1.Clear();
        _richTextBox1.AppendText("objek ditemukan: " + isFound + "\n");
        _richTextBox1.AppendText("waktu pendeteksian SIFT: " + matchTime + "ms\n");
        _richTextBox1.AppendText("fitur mentah pada model yang terdeteksi: " + modelKeyPoints.Size + "\n");
        _richTextBox1.AppendText("match yang ditemukan: " + CvInvoke.cvCountNonZero(mask).ToString());
        return result;
    }
}