Filter to mark (highlight) pairs of points in an image.
Inheritance: BaseInPlaceFilter
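
All of the examples below follow the same pattern: feature points are detected in two images and matched, the images are concatenated side by side, the second point set is shifted right by the first image's width, and PairsMarker draws a connecting line for every pair. The snippet below is a minimal, self-contained sketch of that pattern; the MarkPairs helper name and the using directives are assumptions (in recent Accord.NET versions PairsMarker and Concatenate live in Accord.Imaging.Filters, IntPoint in Accord, and the Apply array extension in Accord.Math) and are not taken from the examples themselves.

    // Minimal usage sketch; the namespaces below are assumptions based on
    // recent Accord.NET layouts, not part of the original examples.
    using System.Drawing;
    using Accord;                 // IntPoint
    using Accord.Imaging.Filters; // PairsMarker, Concatenate
    using Accord.Math;            // Apply() array extension

    static class PairsMarkerSketch
    {
        // Hypothetical helper: points1[i] and points2[i] are assumed to be
        // a matched pair (both arrays have the same length).
        public static Bitmap MarkPairs(Bitmap img1, Bitmap img2,
                                       IntPoint[] points1, IntPoint[] points2)
        {
            // Place the two images next to each other in a single bitmap
            Concatenate concat = new Concatenate(img1);
            Bitmap combined = concat.Apply(img2);

            // Shift the second point set by img1's width so the marks land
            // on the right-hand half of the concatenated image
            PairsMarker pairs = new PairsMarker(
                points1,
                points2.Apply(p => new IntPoint(p.X + img1.Width, p.Y)));

            // The filter draws a line connecting each pair of points and
            // returns a new bitmap, leaving the input untouched
            return pairs.Apply(combined);
        }
    }

The resulting bitmap can then be shown in a PictureBox or saved to disk, as the examples below do after matching with CorrelationMatching, KNearestNeighborMatching, or RANSAC inlier filtering.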
Code Example #1
        private void BtnCorrelation_OnClick(object sender, RoutedEventArgs e)
        {
            // Step 2: Match feature points using a correlation measure
            CorrelationMatching matcher = new CorrelationMatching(9);
            IntPoint[][] matches = matcher.Match(img1, img2, harrisPoints1, harrisPoints2);

            // Get the two sets of points
            correlationPoints1 = matches[0];
            correlationPoints2 = matches[1];

            // Concatenate the two images in a single image (just to show on screen)
            Concatenate concat = new Concatenate(img1);
            Bitmap img3 = concat.Apply(img2);

            // Show the marked correlations in the concatenated image
            PairsMarker pairs = new PairsMarker(
                correlationPoints1, // Add image1's width to the X points to show the markings correctly
                correlationPoints2.Apply(p => new IntPoint(p.X + img1.Width, p.Y)));

            PictureBox.Source = pairs.Apply(img3);
        }
Code Example #2
File: MainForm.cs  Project: BiYiTuan/framework
        private void btnCorrelation_Click(object sender, EventArgs e)
        {
            if (surfPoints1 == null)
            {
                MessageBox.Show("Please, click SURF button first! :-)");
                return;
            }

            // Step 2: Match feature points using a k-NN
            KNearestNeighborMatching matcher = new KNearestNeighborMatching(5);
            IntPoint[][] matches = matcher.Match(surfPoints1, surfPoints2);

            // Get the two sets of points
            correlationPoints1 = matches[0];
            correlationPoints2 = matches[1];

            // Concatenate the two images in a single image (just to show on screen)
            Concatenate concat = new Concatenate(img1);
            Bitmap img3 = concat.Apply(img2);

            // Show the marked correlations in the concatenated image
            PairsMarker pairs = new PairsMarker(
                correlationPoints1, // Add image1's width to the X points to show the markings correctly
                correlationPoints2.Apply(p => new IntPoint(p.X + img1.Width, p.Y)));

            pictureBox.Image = pairs.Apply(img3);
        }
Code Example #3
File: MainForm.cs  Project: BiYiTuan/framework
        private void btnRansac_Click(object sender, EventArgs e)
        {
            if (correlationPoints1 == null)
            {
                MessageBox.Show("Please, click Nearest Neighbor button first! :-)");
                return;
            }

            if (correlationPoints1.Length < 4 || correlationPoints2.Length < 4)
            {
                MessageBox.Show("Insufficient points to attempt a fit.");
                return;
            }

            // Step 3: Create the homography matrix using a robust estimator
            RansacHomographyEstimator ransac = new RansacHomographyEstimator(0.001, 0.99);
            homography = ransac.Estimate(correlationPoints1, correlationPoints2);

            // Plot RANSAC results against correlation results
            IntPoint[] inliers1 = correlationPoints1.Submatrix(ransac.Inliers);
            IntPoint[] inliers2 = correlationPoints2.Submatrix(ransac.Inliers);

            // Concatenate the two images in a single image (just to show on screen)
            Concatenate concat = new Concatenate(img1);
            Bitmap img3 = concat.Apply(img2);

            // Show the marked correlations in the concatenated image
            PairsMarker pairs = new PairsMarker(
                inliers1, // Add image1's width to the X points to show the markings correctly
                inliers2.Apply(p => new IntPoint(p.X + img1.Width, p.Y)));

            pictureBox.Image = pairs.Apply(img3);
        }
Code Example #4
        /// <summary>
        /// Matches detected keypoints.
        /// </summary>
        private void MatchKeypoints()
        {
            // matching keypoints step needs at least two images
            if (input_images.Count < 2) return;

            // using RANSAC homography estimator
            RansacHomographyEstimator ransac = new RansacHomographyEstimator(0.001, 0.99);
            CorrelationMatching matcher = new CorrelationMatching(9);

            Bitmap mergedImage = new Bitmap(input_images.Count * input_images[0].Width, input_images[0].Height); //it is assumed the images are of same size!
            Graphics graphics = Graphics.FromImage(mergedImage);

            IntPoint[][] matches;

            int cumulativeWidth = 0;
            int keypoints_cntr = 0;

            // draw all images
            for (int i = 0; i < input_images.Count; i++) {
                graphics.DrawImage(input_images[i], cumulativeWidth, 0, input_images[i].Width, input_images[i].Height);
                cumulativeWidth += input_images[i].Width;
            }

            // iterate through all images
            for (int i = 0; i < input_images.Count; i++)
            {

                if (i != input_images.Count - 1)
                {
                    // match detected keypoints with maximum cross-correlation algorithm
                    matches = matcher.Match(new Bitmap(input_images[i]), new Bitmap(input_images[i + 1]), keypoints[keypoints_cntr].ToArray(), keypoints[keypoints_cntr + 1].ToArray());

                    IntPoint[] correlationPoints1 = matches[0];
                    IntPoint[] correlationPoints2 = matches[1];

                    // Plot RANSAC results against correlation results
                    homography = ransac.Estimate(correlationPoints1, correlationPoints2);

                    // take only inliers - good matches
                    IntPoint[] inliers1 = correlationPoints1.Submatrix(ransac.Inliers);
                    IntPoint[] inliers2 = correlationPoints2.Submatrix(ransac.Inliers);

                    PairsMarker pairs = new PairsMarker(correlationPoints1, correlationPoints2.Apply(p => new IntPoint(p.X + input_images[i].Width, p.Y)));

                    // draw all matching pairs
                    for (int j = 0; j < pairs.Points1.Count(); j++) {
                        if (i % 2 == 0) graphics.DrawLine(new Pen(Color.Red, 1.5f), new System.Drawing.Point(correlationPoints1[j].X + i * input_images[i].Width, correlationPoints1[j].Y), new System.Drawing.Point(correlationPoints2[j].X + (i + 1) * input_images[i].Width, correlationPoints2[j].Y));
                    }

                    // draw inliers
                    for (int j = 0; j < inliers1.Count(); j++) {
                        if (i % 2 == 0) graphics.DrawLine(new Pen(Color.GreenYellow, 1.5f), new System.Drawing.Point(inliers1[j].X + i * input_images[i].Width, inliers1[j].Y), new System.Drawing.Point(inliers2[j].X + (i + 1) * input_images[i].Width, inliers2[j].Y));
                        else            graphics.DrawLine(new Pen(Color.Blue, 1.5f), new System.Drawing.Point(inliers1[j].X + i * input_images[i].Width, inliers1[j].Y), new System.Drawing.Point(inliers2[j].X + (i + 1) * input_images[i].Width, inliers2[j].Y));
                    }

                    keypoints_cntr += 2;
                }
            }

            ShowThumbnail(mergedImage, true, PanoramaPhase.MatchKeypoints);
        }
Code Example #5
        private Bitmap CompareAndDrawImage(Bitmap modelImage, Bitmap observedImage, SurfSettings setting)
        {
            Stopwatch watch1 = new Stopwatch();
            Stopwatch watch2 = new Stopwatch();

            Bitmap returnBitmap;

            watch2.Start();
            watch1.Reset(); watch1.Start();
            double hessianThreshold = setting.HessianThresh.HasValue ? setting.HessianThresh.Value : 500;
            float hessianThreshold2 = (float)hessianThreshold / 1000000;

            Debug.WriteLine("hessianThreshold2: {0}", hessianThreshold2);
            SpeededUpRobustFeaturesDetector surf = new SpeededUpRobustFeaturesDetector(hessianThreshold2);
            List<SpeededUpRobustFeaturePoint> surfPoints1 = surf.ProcessImage(modelImage);
            List<SpeededUpRobustFeaturePoint> surfPoints2 = surf.ProcessImage(observedImage);

            Debug.WriteLine("Surf points count: {0}", surfPoints1.Count);
            Debug.WriteLine("Surf points count: {0}", surfPoints2.Count);
            //long memoryFootprint = MemorySize.GetBlobSizeinKb(surfPoints2);
            //Debug.WriteLine("Surf extractor: {0} kb", memoryFootprint);

            watch1.Stop();
            Debug.WriteLine("Surf Detection tooked {0} ms", watch1.ElapsedMilliseconds);

            watch1.Reset(); watch1.Start();
            // Show the marked points in the original images
            Bitmap img1mark = new FeaturesMarker(surfPoints1, 2).Apply(modelImage);
            Bitmap img2mark = new FeaturesMarker(surfPoints2, 2).Apply(observedImage);
            // Concatenate the two images together in a single image (just to show on screen)
            Concatenate concatenate = new Concatenate(img1mark);
            returnBitmap = concatenate.Apply(img2mark);
            watch1.Stop();
            Debug.WriteLine("Surf point plotting tooked {0} ms", watch1.ElapsedMilliseconds);

            //watch1.Reset(); watch1.Start();
            //List<IntPoint>[] coretionalMatches = getMatches(surfPoints1, surfPoints2);
            //watch1.Stop();
            //Debug.WriteLine("Correctional Match tooked {0} ms", watch1.ElapsedMilliseconds);

            //// Get the two sets of points
            //IntPoint[] correlationPoints11 = coretionalMatches[0].ToArray();
            //IntPoint[] correlationPoints22 = coretionalMatches[1].ToArray();

            //Debug.WriteLine("Correclation points count: {0}", correlationPoints11.Length);
            //Debug.WriteLine("Correclation points count: {0}", correlationPoints22.Length);

            Debug.WriteLine("Threshold: {0}", setting.UniquenessThreshold.Value);
            watch1.Reset(); watch1.Start();
            // Step 2: Match feature points using a k-NN
            KNearestNeighborMatching matcher = new KNearestNeighborMatching(2);
            matcher.Threshold = setting.UniquenessThreshold.Value;
            IntPoint[][] matches = matcher.Match(surfPoints1, surfPoints2);
            watch1.Stop();
            Debug.WriteLine("Knn Match tooked {0} ms", watch1.ElapsedMilliseconds);

            // Get the two sets of points
            IntPoint[] correlationPoints1 = matches[0];
            IntPoint[] correlationPoints2 = matches[1];

            Debug.WriteLine("Knn points count: {0}", correlationPoints1.Length);
            Debug.WriteLine("Knn points count: {0}", correlationPoints2.Length);

            //watch1.Reset(); watch1.Start();
            //// Show the marked correlations in the concatenated image
            //PairsMarker pairs = new PairsMarker(
            //    correlationPoints1, // Add image1's width to the X points to show the markings correctly
            //    correlationPoints2.Apply(p => new IntPoint(p.X + modelImage.Width, p.Y)), Color.Blue);

            //returnBitmap = pairs.Apply(returnBitmap);
            //watch1.Stop();
            //Debug.WriteLine("Match pair marking tooked {0} ms", watch1.ElapsedMilliseconds);

            if (correlationPoints1.Length < 4 || correlationPoints2.Length < 4)
            {
                MessageBox.Show("Insufficient points to attempt a fit.");
                return null;
            }

            watch1.Reset(); watch1.Start();
            // Step 3: Create the homography matrix using a robust estimator
            RansacHomographyEstimator ransac = new RansacHomographyEstimator(0.001, 0.99);
            MatrixH homography = ransac.Estimate(correlationPoints1, correlationPoints2);
            watch1.Stop();
            Debug.WriteLine("Ransac tooked {0} ms", watch1.ElapsedMilliseconds);

            watch1.Reset(); watch1.Start();
            // Plot RANSAC results against correlation results
            IntPoint[] inliers1 = correlationPoints1.Submatrix(ransac.Inliers);
            IntPoint[] inliers2 = correlationPoints2.Submatrix(ransac.Inliers);
            watch1.Stop();
            Debug.WriteLine("Ransac SubMatrix {0} ms", watch1.ElapsedMilliseconds);

            Debug.WriteLine("Ransac points count: {0}", inliers1.Length);
            Debug.WriteLine("Ransac points count: {0}", inliers2.Length);

            watch1.Reset(); watch1.Start();
            PairsMarker inlierPairs = new PairsMarker(
               inliers1, // Add image1's width to the X points to show the markings correctly
               inliers2.Apply(p => new IntPoint(p.X + modelImage.Width, p.Y)), Color.Red);

            returnBitmap = inlierPairs.Apply(returnBitmap);
            watch1.Stop();
            Debug.WriteLine("Ransac plotting tooked {0} ms", watch1.ElapsedMilliseconds);

            watch2.Stop();
            return returnBitmap;
        }
Code Example #6
        private void BtnRansac_OnClick(object sender, RoutedEventArgs e)
        {
            // Step 3: Create the homography matrix using a robust estimator
            RansacHomographyEstimator ransac = new RansacHomographyEstimator(0.001, 0.99);
            homography = ransac.Estimate(correlationPoints1, correlationPoints2);

            // Plot RANSAC results against correlation results
            IntPoint[] inliers1 = correlationPoints1.Submatrix(ransac.Inliers);
            IntPoint[] inliers2 = correlationPoints2.Submatrix(ransac.Inliers);

            // Concatenate the two images in a single image (just to show on screen)
            Concatenate concat = new Concatenate(img1);
            Bitmap img3 = concat.Apply(img2);

            // Show the marked correlations in the concatenated image
            PairsMarker pairs = new PairsMarker(
                inliers1, // Add image1's width to the X points to show the markings correctly
                inliers2.Apply(p => new IntPoint(p.X + img1.Width, p.Y)));

            PictureBox.Source = pairs.Apply(img3);
        }
Code Example #7
File: MainForm.cs  Project: xyicheng/Accord
        private void button2_Click(object sender, EventArgs e)
        {
            // Step 3: Create the homography matrix using a robust estimator
            RansacHomographyEstimator ransac = new RansacHomographyEstimator(0.001, 0.99);
            homography = ransac.Estimate(correlationPoints1, correlationPoints2);

            // Plot RANSAC results against correlation results
            IntPoint[] inliers1 = correlationPoints1.Submatrix(ransac.Inliers);
            IntPoint[] inliers2 = correlationPoints2.Submatrix(ransac.Inliers);

            // Concatenate the two images in a single image (just to show on screen)
            Concatenate concat = new Concatenate(img1);
            Bitmap img3 = concat.Apply(img2);

            // Show the marked correlations in the concatenated image
            PairsMarker pairs = new PairsMarker(
                inliers1, // Add image1's width to the X points to show the markings correctly
                inliers2.Apply(p => new IntPoint(p.X + img1.Width, p.Y)));

            pictureBox1.Image = pairs.Apply(img3);

            numA.Value = (decimal)homography.Elements[0];
            numB.Value = (decimal)homography.Elements[1];
            numC.Value = (decimal)homography.Elements[2];

            numD.Value = (decimal)homography.Elements[3];
            numE.Value = (decimal)homography.Elements[4];
            numF.Value = (decimal)homography.Elements[5];

            numG.Value = (decimal)homography.Elements[6];
            numH.Value = (decimal)homography.Elements[7];
        }