Example 1
        private IEnumerable<Bitmap> DoRecognize(Bitmap b)
        {
            List<Bitmap> Result = new List<Bitmap>();

            var NotClassifiedFeatures       = FeatureExtractor.ExtractFeatures(b, null);
            List<SURFFeature> FeatureRemain = new List<SURFFeature>();

            Parallel.ForEach(NotClassifiedFeatures, (NotClassifiedFeature) =>
            {
                var KnnResult = knn.Classify(k, NotClassifiedFeature);

                if (KnnResult.Confidence >= ConfidenceThreshold)
                {
                    lock (FeatureRemain)
                        FeatureRemain.Add(NotClassifiedFeature);
                }
            });


            var    GroupedFeatureRemains = FeatureRemain.GroupBy(x => x.Tag);
            Bitmap FinalResult           = (Bitmap)b.Clone();
            //Draw Features
            FeaturesMarker featuresMarker = new FeaturesMarker(FeatureRemain.Select(x => x.Original));
            Bitmap         MarkedBitmap   = featuresMarker.Apply(b);

            //Draw bounding box
            foreach (var GroupedFeatureRemain in GroupedFeatureRemains)
            {
                string Tag = GroupedFeatureRemain.Key;

                Point2DGrouper point2DGrouper = new Point2DGrouper(MaximumPointGroupDistance)
                {
                    MinimumGroupCount = MinimumPointGroupCount
                };
                var BoundingBoxes = point2DGrouper
                                    .GetGrouped2DPoints(GroupedFeatureRemain.Select(f => f.GetLocation()))
                                    .GroupBy(p => p.GroupID)
                                    .Select(group => group.GetBoundingBox(MinimumPointGroupArea))
                                    .Where(bbox => bbox != null);
                var ObjectBox = BoundingBoxes.GetBoundingBox();

                Brush brush = SelectBrush(Tag);
                if (BoundingBoxes.Any()) // the LINQ query is never null; check for matches instead
                {
                    MarkedBitmap.DrawRect(BoundingBoxes.Select(boxes => boxes.ToRectangle()), brush, 3, "Bbox:" + Tag);
                }
                if (ObjectBox != null)
                {
                    FinalResult.DrawRect(ObjectBox.ToRectangle(), brush, 3, "Object:" + Tag);
                }
            }

            FinalResult.Tag  = "Final Result";
            MarkedBitmap.Tag = "Class Bounding Boxes";
            Result.Add(FinalResult);
            Result.Add(MarkedBitmap);

            return Result;
        }
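A note on Example 1: the Parallel.ForEach body serializes on a lock to guard the shared list. A minimal alternative sketch using ConcurrentBag<T> from System.Collections.Concurrent avoids the explicit lock (assuming the same knn classifier, k, and ConfidenceThreshold members as in the example above):

        // Sketch only: ConcurrentBag<T> is thread-safe, so no lock is needed.
        // knn, k and ConfidenceThreshold are the same members used in Example 1.
        var featureRemain = new ConcurrentBag<SURFFeature>();
        Parallel.ForEach(notClassifiedFeatures, feature =>
        {
            var knnResult = knn.Classify(k, feature);
            if (knnResult.Confidence >= ConfidenceThreshold)
                featureRemain.Add(feature); // lock-free add
        });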
Example 2
        public void ProcessImageTest4()
        {
            Bitmap[] bitmaps =
            {
                Resources.flower01,
                Resources.flower03,
                Resources.flower06,
                Resources.flower07,
                Resources.flower09,
                Resources.flower10,
            };

            var surf = new SpeededUpRobustFeaturesDetector();

            int current = 0;

            foreach (Bitmap img in bitmaps)
            {
                List<SpeededUpRobustFeaturePoint> expected;
                List<SpeededUpRobustFeaturePoint> actual;

                // Create OpenSURF detector by Chris Evans
                {
                    // Create Integral Image
                    OpenSURFcs.IntegralImage iimg = OpenSURFcs.IntegralImage.FromImage(img);

                    // Extract the interest points
                    var pts = OpenSURFcs.FastHessian.getIpoints(0.0002f, 5, 2, iimg);

                    // Describe the interest points
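                    // (the misspelled "Decribe" is the actual method name in OpenSURFcs)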
                    OpenSURFcs.SurfDescriptor.DecribeInterestPoints(pts, false, false, iimg);

                    expected = new List<SpeededUpRobustFeaturePoint>();
                    foreach (var p in pts)
                    {
                        expected.Add(new SpeededUpRobustFeaturePoint(p.x, p.y, p.scale,
                                                                     p.laplacian, p.orientation, p.response));
                    }
                }

                // Create Accord.NET SURF detector (based on OpenSURF by Chris Evans)
                {
                    actual = surf.ProcessImage(img);
                }

                var img1 = new FeaturesMarker(actual).Apply(img);
                var img2 = new FeaturesMarker(expected).Apply(img);

                ImageBox.Show(new Concatenate(img1).Apply(img2), PictureBoxSizeMode.Zoom);


                current++;
            }
        }
Example 3
        public void ProcessImageTest4()
        {
            var bitmaps = BagOfVisualWordsTest.GetImages();
            var surf    = new SpeededUpRobustFeaturesDetector();

            int current = 0;

            foreach (Bitmap img in bitmaps)
            {
                List<SpeededUpRobustFeaturePoint> expected;
                List<SpeededUpRobustFeaturePoint> actual;

                // Create OpenSURF detector by Chris Evans
                {
                    // Create Integral Image
                    OpenSURFcs.IntegralImage iimg = OpenSURFcs.IntegralImage.FromImage(img);

                    // Extract the interest points
                    var pts = OpenSURFcs.FastHessian.getIpoints(0.0002f, 5, 2, iimg);

                    // Describe the interest points
                    OpenSURFcs.SurfDescriptor.DecribeInterestPoints(pts, false, false, iimg);

                    expected = new List<SpeededUpRobustFeaturePoint>();
                    foreach (var p in pts)
                    {
                        expected.Add(new SpeededUpRobustFeaturePoint(p.x, p.y, p.scale,
                                                                     p.laplacian, p.orientation, p.response));
                    }
                }

                // Create Accord.NET SURF detector (based on OpenSURF by Chris Evans)
                {
                    actual = surf.ProcessImage(img);
                }

                var img1 = new FeaturesMarker(actual).Apply(img);
                var img2 = new FeaturesMarker(expected).Apply(img);

                // ImageBox.Show(new Concatenate(img1).Apply(img2), PictureBoxSizeMode.Zoom);


                current++;

                for (int i = 0; i < expected.Count; i++)
                {
                    var e = expected[i];
                    var a = actual[i];
                    Assert.AreEqual(e, a);
                }
            }
        }
Example 4
        public void ExampleTest()
        {
            Bitmap lena = Accord.Imaging.Image.Clone(Resources.lena512);

            // The freak detector can be used with any other corners detection
            // algorithm. The default corners detection method used is the FAST
            // corners detection. So, let's start creating this detector first:
            //
            var detector = new FastCornersDetector(60);

            // Now that we have a corners detector, we can pass it to the FREAK
            // feature extraction algorithm. Please note that if we leave this
            // parameter empty, FAST will be used by default.
            //
            var freak = new FastRetinaKeypointDetector(detector);

            // Now, all we have to do is to process our image:
            List<FastRetinaKeypoint> points = freak.ProcessImage(lena);

            // Afterwards, we should obtain 83 feature points. We can inspect
            // the feature points visually using the FeaturesMarker class as
            //
            FeaturesMarker marker = new FeaturesMarker(points, scale: 20);

            // And showing it on screen with
            // ImageBox.Show(marker.Apply(lena));

            // We can also inspect the feature vectors (descriptors) associated
            // with each feature point. In order to get a descriptor vector for
            // any given point, we can use
            //
            byte[] feature = points[42].Descriptor;

            // By default, feature vectors are 64 bytes in length. We can also
            // display those vectors in more readable formats such as HEX or base64
            //
            string hex = points[42].ToHex();
            string b64 = points[42].ToBase64();

            // The above base64 result should be:
            //
            //  "3W8M/ev///ffbr/+v3f34vz//7X+f0609v//+++/1+jfq/e83/X5/+6ft3//b4uaPZf7ePb3n/P93/rIbZlf+g=="
            //

            Assert.AreEqual(83, points.Count);
            Assert.AreEqual(64, feature.Length);
            Assert.AreEqual("3W8M/ev///ffbr/+v3f34vz//7X+f0609v//+++/1+jfq/e83/X5/+6ft3//b4uaPZf7ePb3n/P93/rIbZlf+g==", b64);
        }
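The 64-byte FREAK descriptors shown in Example 4 are binary, so two of them are typically compared with the Hamming distance (the number of differing bits). A minimal, library-independent sketch:

        // Counts the bits that differ between two equal-length binary descriptors,
        // e.g. the byte[] returned by FastRetinaKeypoint.Descriptor.
        static int HammingDistance(byte[] a, byte[] b)
        {
            int distance = 0;
            for (int i = 0; i < a.Length; i++)
            {
                int xor = a[i] ^ b[i];
                while (xor != 0) { distance += xor & 1; xor >>= 1; }
            }
            return distance;
        }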
Example 5
        private void surf()
        {
            var surf = new Accord.Imaging.SpeededUpRobustFeaturesDetector();

            surfPoints1 = surf.ProcessImage(img1).ToArray();
            surfPoints2 = surf.ProcessImage(img2).ToArray();

            // Show the marked points in the original images
            Bitmap img1mark = new FeaturesMarker(surfPoints1).Apply(img1);
            Bitmap img2mark = new FeaturesMarker(surfPoints2).Apply(img2);

            // Concatenate the two images together in a single image (just to show on screen)
            Concatenate concatenate = new Concatenate(img1mark);

            pictureBox1.Image = concatenate.Apply(img2mark);
        }
Example 6
        private void btnSurf_Click(object sender, EventArgs e)
        {
            // Step 1: Detect feature points using Surf Corners Detector
            SpeededUpRobustFeaturesDetector surf = new SpeededUpRobustFeaturesDetector();

            surfPoints1 = surf.Transform(img1);
            surfPoints2 = surf.Transform(img2);

            // Show the marked points in the original images
            Bitmap img1mark = new FeaturesMarker(surfPoints1).Apply(img1);
            Bitmap img2mark = new FeaturesMarker(surfPoints2).Apply(img2);

            // Concatenate the two images together in a single image (just to show on screen)
            Concatenate concatenate = new Concatenate(img1mark);

            pictureBox.Image = concatenate.Apply(img2mark);
        }
Example 7
        private void DetectButton_OnClick(object sender, RoutedEventArgs e)
        {
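            // The slider works on a log scale, e.g. a slider value of -4
            // yields a threshold of 10^-4 = 0.0001.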
            var threshold = (float)Math.Pow(10.0, this.LogThresholdSlider.Value);
            var octaves   = (int)this.OctaveSlider.Value;
            var initial   = (int)this.InitialSlider.Value;

            // Create a new SURF Features Detector using the given parameters
            var surf = new SpeededUpRobustFeaturesDetector(threshold, octaves, initial);

            var points = surf.ProcessImage(this.lenna);

            // Create a new AForge's Corner Marker Filter
            var features = new FeaturesMarker(points);

            // Apply the filter and display it on the Image control. A GDI+ Bitmap
            // cannot be cast to a WPF BitmapSource directly, so the standard
            // HBitmap interop helper is used here for the conversion.
            this.LenaImage.Source = System.Windows.Interop.Imaging.CreateBitmapSourceFromHBitmap(
                features.Apply(this.lenna).GetHbitmap(),
                IntPtr.Zero, Int32Rect.Empty, BitmapSizeOptions.FromEmptyOptions());
        }
Example 8
        private void button1_Click(object sender, EventArgs e)
        {
            // Open an image
            Bitmap lenna = Properties.Resources.lena512;

            float threshold = (float)numThreshold.Value;
            int   octaves   = (int)numOctaves.Value;
            int   initial   = (int)numInitial.Value;

            // Create a new SURF Features Detector using the given parameters
            SpeededUpRobustFeaturesDetector surf =
                new SpeededUpRobustFeaturesDetector(threshold, octaves, initial);

            List<SpeededUpRobustFeaturePoint> points = surf.ProcessImage(lenna);

            // Create a new AForge's Corner Marker Filter
            FeaturesMarker features = new FeaturesMarker(points);

            // Apply the filter and display it on a picturebox
            pictureBox1.Image = features.Apply(lenna);
        }
Example 9
        private void button1_Click(object sender, EventArgs e)
        {
            // Open an image
            Bitmap lenna = Surf.Properties.Resources.lena512;

            // Create a new SURF Features Detector using the given parameters
            SpeededUpRobustFeaturesDetector surf =
                new SpeededUpRobustFeaturesDetector(0.0002f, 5, 2);

            var points = surf.ProcessImage(lenna);

            // Get the SURF Features Descriptor from the detector
            SurfDescriptor descriptor = surf.GetDescriptor();

            descriptor.Describe(points);

            // Create a new AForge's Corner Marker Filter
            FeaturesMarker features = new FeaturesMarker(points.ToArray());

            // Apply the filter and display it on a picturebox
            pictureBox1.Image = features.Apply(lenna);
        }
Example 10
        private Bitmap CompareAndDrawImage(Bitmap modelImage, Bitmap observedImage, SurfSettings setting)
        {
            Stopwatch watch1 = new Stopwatch();
            Stopwatch watch2 = new Stopwatch();

            Bitmap returnBitmap;

            watch2.Start();
            watch1.Reset(); watch1.Start();
            double hessianThreshold  = setting.HessianThresh ?? 500;
            float  hessianThreshold2 = (float)hessianThreshold / 1000000;
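            // e.g. a user-facing value of 500 becomes 500 / 1000000 = 0.0005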

            Debug.WriteLine("hessianThreshold2: {0}", hessianThreshold2);
            SpeededUpRobustFeaturesDetector surf = new SpeededUpRobustFeaturesDetector(hessianThreshold2);
            List<SpeededUpRobustFeaturePoint> surfPoints1 = surf.ProcessImage(modelImage);
            List<SpeededUpRobustFeaturePoint> surfPoints2 = surf.ProcessImage(observedImage);


            Debug.WriteLine("Surf points count: {0}", surfPoints1.Count);
            Debug.WriteLine("Surf points count: {0}", surfPoints2.Count);
            //long memoryFootprint = MemorySize.GetBlobSizeinKb(surfPoints2);
            //Debug.WriteLine("Surf extractor: {0} kb", memoryFootprint);

            watch1.Stop();
            Debug.WriteLine("Surf Detection tooked {0} ms", watch1.ElapsedMilliseconds);

            watch1.Reset(); watch1.Start();
            // Show the marked points in the original images
            Bitmap img1mark = new FeaturesMarker(surfPoints1, 2).Apply(modelImage);
            Bitmap img2mark = new FeaturesMarker(surfPoints2, 2).Apply(observedImage);
            // Concatenate the two images together in a single image (just to show on screen)
            Concatenate concatenate = new Concatenate(img1mark);

            returnBitmap = concatenate.Apply(img2mark);
            watch1.Stop();
            Debug.WriteLine("Surf point plotting tooked {0} ms", watch1.ElapsedMilliseconds);


            //watch1.Reset(); watch1.Start();
            //List<IntPoint>[] coretionalMatches = getMatches(surfPoints1, surfPoints2);
            //watch1.Stop();
            //Debug.WriteLine("Correctional Match tooked {0} ms", watch1.ElapsedMilliseconds);

            //// Get the two sets of points
            //IntPoint[] correlationPoints11 = coretionalMatches[0].ToArray();
            //IntPoint[] correlationPoints22 = coretionalMatches[1].ToArray();

            //Debug.WriteLine("Correclation points count: {0}", correlationPoints11.Length);
            //Debug.WriteLine("Correclation points count: {0}", correlationPoints22.Length);

            Debug.WriteLine("Threshold: {0}", setting.UniquenessThreshold.Value);
            watch1.Reset(); watch1.Start();
            // Step 2: Match feature points using a k-NN
            KNearestNeighborMatching matcher = new KNearestNeighborMatching(2);

            matcher.Threshold = setting.UniquenessThreshold.Value;
            IntPoint[][] matches = matcher.Match(surfPoints1, surfPoints2);
            watch1.Stop();
            Debug.WriteLine("Knn Match tooked {0} ms", watch1.ElapsedMilliseconds);

            // Get the two sets of points
            IntPoint[] correlationPoints1 = matches[0];
            IntPoint[] correlationPoints2 = matches[1];

            Debug.WriteLine("Knn points count: {0}", correlationPoints1.Length);
            Debug.WriteLine("Knn points count: {0}", correlationPoints2.Length);

            //watch1.Reset(); watch1.Start();
            //// Show the marked correlations in the concatenated image
            //PairsMarker pairs = new PairsMarker(
            //    correlationPoints1, // Add image1's width to the X points to show the markings correctly
            //    correlationPoints2.Apply(p => new IntPoint(p.X + modelImage.Width, p.Y)), Color.Blue);

            //returnBitmap = pairs.Apply(returnBitmap);
            //watch1.Stop();
            //Debug.WriteLine("Match pair marking tooked {0} ms", watch1.ElapsedMilliseconds);

            if (correlationPoints1.Length < 4 || correlationPoints2.Length < 4)
            {
                MessageBox.Show("Insufficient points to attempt a fit.");
                return null;
            }

            watch1.Reset(); watch1.Start();
            // Step 3: Create the homography matrix using a robust estimator
            RansacHomographyEstimator ransac = new RansacHomographyEstimator(0.001, 0.99);
            MatrixH homography = ransac.Estimate(correlationPoints1, correlationPoints2);

            watch1.Stop();
            Debug.WriteLine("Ransac tooked {0} ms", watch1.ElapsedMilliseconds);

            watch1.Reset(); watch1.Start();
            // Plot RANSAC results against correlation results
            IntPoint[] inliers1 = correlationPoints1.Submatrix(ransac.Inliers);
            IntPoint[] inliers2 = correlationPoints2.Submatrix(ransac.Inliers);
            watch1.Stop();
            Debug.WriteLine("Ransac SubMatrix {0} ms", watch1.ElapsedMilliseconds);

            Debug.WriteLine("Ransac points count: {0}", inliers1.Length);
            Debug.WriteLine("Ransac points count: {0}", inliers2.Length);

            watch1.Reset(); watch1.Start();
            PairsMarker inlierPairs = new PairsMarker(
                inliers1, // Add image1's width to the X points to show the markings correctly
                inliers2.Apply(p => new IntPoint(p.X + modelImage.Width, p.Y)), Color.Red);

            returnBitmap = inlierPairs.Apply(returnBitmap);
            watch1.Stop();
            Debug.WriteLine("Ransac plotting tooked {0} ms", watch1.ElapsedMilliseconds);

            watch2.Stop();
            return returnBitmap;
        }
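Beyond plotting the inlier pairs, the homography estimated in Example 10 can also be used to register the two images. A sketch along the lines of Accord.NET's panorama sample, assuming the homography, modelImage and observedImage variables from the code above:

        // Warp and blend the observed image over the model image using the
        // RANSAC-estimated homography (cf. Accord.NET's panorama demo).
        Blend blend = new Blend(homography, modelImage);
        Bitmap panorama = blend.Apply(observedImage);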
Example 11
        private void process1()
        {
            var bitmap1 = (Bitmap)sourcebox1.Image;
            var bitmap2 = (Bitmap)sourcebox2.Image;
            var hash1   = ImagePhash.ComputeDigest(bitmap1.ToLuminanceImage());
            var hash2   = ImagePhash.ComputeDigest(bitmap2.ToLuminanceImage());
            var score   = ImagePhash.GetCrossCorrelation(hash1, hash2);

            Console.WriteLine("score: {0}", score);

            //threshold value
            var thres = new Threshold(110);

            Grayscale filter = new Grayscale(0.2125, 0.7154, 0.0721);
            // apply the filter to the model
            Bitmap grey1 = filter.Apply(bitmap1);

            thres.ApplyInPlace(grey1);

            // Apply the filter to the observed image
            Bitmap grey2 = filter.Apply(bitmap2);

            thres.ApplyInPlace(grey2);

            int modelPoints = 0, matchingPoints = 0;

            var skewChecker     = new DocumentSkewChecker();
            var angle1          = skewChecker.GetSkewAngle(grey1);
            var rotationFilter1 = new RotateBicubic(-angle1);

            rotationFilter1.FillColor = Color.White;
            grey1 = rotationFilter1.Apply(grey1);

            var angle2          = skewChecker.GetSkewAngle(grey2);
            var rotationFilter2 = new RotateBicubic(-angle2);

            rotationFilter2.FillColor = Color.White;
            grey2 = rotationFilter2.Apply(grey2);

            //CorrelationMatching matcher = new CorrelationMatching(5, grey1, grey2);
            //var results = matcher.GetHashCode();
            var detector = new FastCornersDetector(15);
            var freak    = new FastRetinaKeypointDetector(detector);

            FastRetinaKeypoint[] features1 = freak.Transform(grey1).ToArray();
            modelPoints = features1.Count();

            Console.WriteLine("count: {0}", modelPoints);

            FastRetinaKeypoint[] features2 = freak.Transform(grey2).ToArray();

            Console.WriteLine("count: {0}", features2.Count());

            KNearestNeighborMatching matcher = new KNearestNeighborMatching(7);

            //var length = 0;

            IntPoint[][] results = matcher.Match(features1, features2);
            matchingPoints = results[0].Count(); // similarity of image1 to image2
            ////matchingPoints = results[1].Count(); // similarity of image2 to image1

            Console.WriteLine("matched points: {0}", matchingPoints);

            sourcebox1.Image = bitmap1;
            sourcebox2.Image = bitmap2;
            var marker1 = new FeaturesMarker(features1, 30);
            var marker2 = new FeaturesMarker(features2, 30);



            double similPercent = 0;

            if (matchingPoints > 0)
            {
                similPercent = (matchingPoints * 100d) / modelPoints;
            }

            Console.WriteLine("score: {0}", similPercent);

            simil1.Text = similPercent.ToString("##.##") + "%";
            simil2.Text = (score * 100.00d).ToString("##.##") + "%";

            angle_text.Text  = angle2.ToString("##.##") + "°";
            resultbox1.Image = marker1.Apply(grey1);
            resultbox2.Image = marker2.Apply(grey2);
        }
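In Example 11, ImagePhash.GetCrossCorrelation returns a similarity score that approaches 1.0 for near-identical images. A minimal decision sketch; the 0.9 cutoff is an illustrative assumption, not a value taken from the example:

        // Hypothetical cutoff: treat the pair as a likely duplicate above 0.9.
        const double duplicateCutoff = 0.9;
        bool likelyDuplicate = score > duplicateCutoff;
        Console.WriteLine(likelyDuplicate ? "likely duplicate" : "different images");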
Example 12
        private void cornerDetectorToolStripMenuItem_Click(object sender, EventArgs e)
        {
            Bitmap image = new Bitmap(System.Drawing.Image.FromFile(@"C:\OSU3\UAS\Training Videos\ZoltanTestSets\test\DJI_0247.rd.JPG"));

            float threshold = 0.001f;

            /* int octaves = (int)numOctaves.Value;
               int initial = (int)numInitial.Value; */

            ResizeNearestNeighbor filter = new ResizeNearestNeighbor(600, 400);

            image = filter.Apply(image);

            // Create a new SURF Features Detector using the given parameters
            SpeededUpRobustFeaturesDetector surf =
                new SpeededUpRobustFeaturesDetector(threshold);

            List<SpeededUpRobustFeaturePoint> points = surf.ProcessImage(image);

            // Create a new AForge's Corner Marker Filter
            FeaturesMarker features = new FeaturesMarker(points, 5);

            // Apply the filter and display it on a picturebox
            Bitmap image_feature = features.Apply(image);


            SingleImageWnd form = new SingleImageWnd(image_feature);

            form.MdiParent = this;
            form.Show();

            /*Bitmap image = new Bitmap(System.Drawing.Image.FromFile(@"C:\OSU3\CAR\Reports\Targets\Target1\DSC00217.JPG"));
             * Bitmap image2 = new Bitmap(image.Width, image.Height);
             *
             * using (Graphics g = Graphics.FromImage(image2))
             * {
             *  g.DrawImage(image, 0, 0);
             *
             *  Grayscale filter = new Grayscale(.33, .33, .33);
             *  image = filter.Apply(image);
             *
             *  double sigma = 2;
             *  float k = 4 / 10;
             *  float threshold = 500;
             *
             *  // Create a new Harris Corners Detector using the given parameters
             *  HarrisCornersDetector harris = new HarrisCornersDetector(k)
             *  {
             *      Measure = HarrisCornerMeasure.Harris, //: HarrisCornerMeasure.Noble,
             *      Threshold = threshold,
             *      Sigma = sigma
             *  };
             *
             *  List<IntPoint> corners = harris.ProcessImage(image);
             *
             *  foreach (IntPoint corner in corners)
             *  {
             *      //A circle with Red Color and 2 Pixel wide line
             *      //gf.DrawEllipse(new Pen(Color.Red, 2), new Rectangle(0, 0, 200, 200));
             *      DrawCircle(g, new Pen(Color.Red, 1), corner.X, corner.Y, 1);
             *  }
             *
             * }
             *
             * ImageForm form = new ImageForm(image2);
             * form.MdiParent = this;
             * form.Show();*/
        }