public void ProcessImageTest2()
        {
            UnmanagedImage image = UnmanagedImage.FromManagedImage(Properties.Resources.lena512);

            FastCornersDetector target = new FastCornersDetector();

            target.Suppress  = true;
            target.Threshold = 40;

            List <IntPoint> actual = target.ProcessImage(image);

            /*
             * PointsMarker marker = new PointsMarker(actual.ToArray());
             * marker.Width = 3;
             * marker.MarkerColor = Color.FromArgb(255, 0, 0);
             * var markers = marker.Apply(image);
             * ImageBox.Show(markers.ToManagedImage(), PictureBoxSizeMode.Zoom);
             */

            Assert.AreEqual(324, actual.Count);
            Assert.AreEqual(506, actual[0].X);
            Assert.AreEqual(4, actual[0].Y);
            Assert.AreEqual(152, actual[6].X);
            Assert.AreEqual(75, actual[6].Y);
            Assert.AreEqual(416, actual[11].X);
            Assert.AreEqual(115, actual[11].Y);
            Assert.AreEqual(140, actual[65].X);
            Assert.AreEqual(246, actual[65].Y);
            Assert.AreEqual(133, actual[73].X);
            Assert.AreEqual(253, actual[73].Y);
        }
Example #2
        /// <summary>
        /// Features from Accelerated Segment Test (FAST) corners detector.
        /// <para>Accord.NET internal call. Please see: <see cref="Accord.Imaging.FastCornersDetector"/> for details.</para>
        /// </summary>
        /// <param name="im">Image.</param>
        /// <param name="threshold">The suppression threshold. Decreasing this value increases the number of points detected by the algorithm.</param>
        /// <returns>Interest point locations.</returns>
        public static List <IntPoint> CornerFeaturesDetector(this Image <Gray, byte> im, int threshold = 20)
        {
            FastCornersDetector fast = new FastCornersDetector(threshold);
            var points = fast.ProcessImage(im.ToAForgeImage(copyAlways: false, failIfCannotCast: true));

            return(points);
        }
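        // A minimal usage sketch for the extension above (not part of the original
        // snippet). It assumes Emgu CV's Image<Gray, byte>; the file name
        // "scene.png" is a placeholder.
        public static void CornerFeaturesDetectorExample()
        {
            Image <Gray, byte> gray = new Image <Gray, byte>("scene.png");

            // A lower threshold generally yields more interest points:
            List <IntPoint> corners = gray.CornerFeaturesDetector(threshold: 15);

            Console.WriteLine("Found {0} corners", corners.Count);
        }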
        public void MatchTest3()
        {
            FastCornersDetector fast = new FastCornersDetector(threshold: 10);

            FastRetinaKeypointDetector freak = new FastRetinaKeypointDetector(fast);

            var keyPoints1 = freak.ProcessImage(Properties.Resources.old).ToArray();
            var keyPoints2 = freak.ProcessImage(Properties.Resources.flower01).ToArray();

            var matcher = new KNearestNeighborMatching <byte[]>(5, Distance.BitwiseHamming);

            { // direct
                IntPoint[][] matches = matcher.Match(keyPoints1, keyPoints2);
                Assert.AreEqual(2, matches.Length);
                Assert.AreEqual(138, matches[0].Length);
                Assert.AreEqual(138, matches[1].Length);
                Assert.AreEqual(532, matches[0][0].X);
                Assert.AreEqual(159, matches[0][0].Y);
                Assert.AreEqual(keyPoints2[0].ToIntPoint(), matches[1][0]);
            }

            { // reverse
                IntPoint[][] matches = matcher.Match(keyPoints2, keyPoints1);
                Assert.AreEqual(2, matches.Length);
                Assert.AreEqual(138, matches[0].Length);
                Assert.AreEqual(138, matches[1].Length);
                Assert.AreEqual(keyPoints2[0].ToIntPoint(), matches[0][0]);
                Assert.AreEqual(532, matches[1][0].X);
                Assert.AreEqual(159, matches[1][0].Y);
            }
        }
        public void ProcessImageTest()
        {
            UnmanagedImage image = UnmanagedImage.FromManagedImage(Properties.Resources.sample_black);

            FastCornersDetector target = new FastCornersDetector();

            target.Suppress  = false;
            target.Threshold = 20;

            List <IntPoint> actual = target.ProcessImage(image);

            /*
             * PointsMarker marker = new PointsMarker(actual.ToArray());
             * marker.Width = 3;
             * marker.MarkerColor = Color.FromArgb(255, 0, 0);
             * var markers = marker.Apply(image);
             * ImageBox.Show(markers.ToManagedImage(), PictureBoxSizeMode.Zoom);
             */

            Assert.AreEqual(237, actual.Count);
            Assert.AreEqual(404, actual[0].X);
            Assert.AreEqual(35, actual[0].Y);
            Assert.AreEqual(407, actual[6].X);
            Assert.AreEqual(36, actual[6].Y);
            Assert.AreEqual(407, actual[11].X);
            Assert.AreEqual(38, actual[11].Y);
            Assert.AreEqual(55, actual[65].X);
            Assert.AreEqual(135, actual[65].Y);
            Assert.AreEqual(103, actual[73].X);
            Assert.AreEqual(137, actual[73].Y);
        }
Example #5
        /// <summary>
        /// Features from Accelerated Segment Test (FAST) corners detector.
        /// <para>Accord.NET internal call. Please see: <see cref="Accord.Imaging.FastCornersDetector"/> for details.</para>
        /// </summary>
        /// <param name="im">Image.</param>
        /// <param name="threshold">The suppression threshold. Decreasing this value increases the number of points detected by the algorithm.</param>
        /// <returns>Interest point locations.</returns>
        public static List <IntPoint> CornerFeaturesDetector(this Gray <byte>[,] im, int threshold = 20)
        {
            FastCornersDetector fast = new FastCornersDetector(threshold);

            List <IntPoint> points;

            using (var uImg = im.Lock())
            {
                points = fast.ProcessImage(uImg.AsAForgeImage());
            }

            return(points);
        }
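        // A minimal usage sketch for the array-based overload above (not part of the
        // original snippet); the Gray<byte>[,] image is assumed to have been obtained
        // elsewhere, since no loader for that type is shown on this page.
        public static int CountCorners(Gray <byte>[,] grayImage)
        {
            // A lower threshold generally yields more interest points:
            List <IntPoint> corners = grayImage.CornerFeaturesDetector(threshold: 15);

            return corners.Count;
        }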
        public void ExampleTest()
        {
            Bitmap lena = Accord.Imaging.Image.Clone(Resources.lena512);

            // The FREAK detector can be used with any corners detection algorithm.
            // The default corners detection method is FAST, so let's start by
            // creating this detector first:
            //
            var detector = new FastCornersDetector(60);

            // Now that we have a corners detector, we can pass it to the FREAK
            // feature extraction algorithm. Please note that if we leave this
            // parameter empty, FAST will be used by default.
            //
            var freak = new FastRetinaKeypointDetector(detector);

            // Now, all we have to do is to process our image:
            List <FastRetinaKeypoint> points = freak.ProcessImage(lena);

            // Afterwards, we should obtain 83 feature points. We can inspect
            // the feature points visually using the FeaturesMarker class:
            //
            FeaturesMarker marker = new FeaturesMarker(points, scale: 20);

            // And showing it on screen with
            // ImageBox.Show(marker.Apply(lena));

            // We can also inspect the feature vectors (descriptors) associated
            // with each feature point. In order to get a descriptor vector for
            // any given point, we can use
            //
            byte[] feature = points[42].Descriptor;

            // By default, feature vectors are 64 bytes long. We can also display
            // those vectors in more readable formats, such as hex or base64:
            //
            string hex = points[42].ToHex();
            string b64 = points[42].ToBase64();

            // The above base64 result should be:
            //
            //  "3W8M/ev///ffbr/+v3f34vz//7X+f0609v//+++/1+jfq/e83/X5/+6ft3//b4uaPZf7ePb3n/P93/rIbZlf+g=="
            //

            Assert.AreEqual(83, points.Count);
            Assert.AreEqual(64, feature.Length);
            Assert.AreEqual("3W8M/ev///ffbr/+v3f34vz//7X+f0609v//+++/1+jfq/e83/X5/+6ft3//b4uaPZf7ePb3n/P93/rIbZlf+g==", b64);
        }
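        // A brief follow-up sketch (not part of the original test): once FREAK points
        // have been extracted from two images, their binary descriptors can be matched
        // with the same KNearestNeighborMatching API used elsewhere on this page. The
        // two Bitmap parameters are assumed to be provided by the caller.
        public static int CountMatches(Bitmap imageA, Bitmap imageB)
        {
            var freak = new FastRetinaKeypointDetector(new FastCornersDetector(60));

            FastRetinaKeypoint[] pointsA = freak.ProcessImage(imageA).ToArray();
            FastRetinaKeypoint[] pointsB = freak.ProcessImage(imageB).ToArray();

            // Match the binary descriptors using k-NN with the Hamming distance:
            var matcher = new KNearestNeighborMatching <byte[]>(5, new Hamming());
            IntPoint[][] matches = matcher.Match(pointsA, pointsB);

            // matches[0] holds the matched points of imageA, matches[1] those of imageB.
            return matches[0].Length;
        }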
Example #7
    // Start is called before the first frame update
    void Start()
    {
        //TestImages t = new TestImages("lena");

        Bitmap image = BitmapConversion.Texture2DToBitmap(inputTex);

        FastCornersDetector fast = new FastCornersDetector(20);



        // Create a new AForge CornersMarker filter
        CornersMarker corners = new CornersMarker(fast, System.Drawing.Color.White);

        // Apply the filter in place and display the result on the RawImage texture
        corners.ApplyInPlace(image);
        rawImage.texture = BitmapConversion.BitmapToTexture2D(image);
    }
        public void GetCorners(int threshold, bool suppress)
        {
            // Create the corners detector's instance
            FastCornersDetector fcd = new FastCornersDetector()
            {
                Suppress  = suppress,
                Threshold = threshold
            };

            // Apply the filter and return the points
            List <IntPoint> corners = fcd.ProcessImage(AForge.Imaging.Image.FromFile(this.CurrentImage));

            if (ImageComplete != null)
            {
                ImageComplete(corners);
            }
        }
Example #9
        private void button1_Click(object sender, EventArgs e)
        {
            // Open an image
            Bitmap lenna = Resources.lena512;

            // Create a new FAST corners detector using the given parameters
            FastCornersDetector fast = new FastCornersDetector();

            fast.Threshold = (int)numThreshold.Value;
            fast.Suppress  = true;

            // Create a new AForge CornersMarker filter
            CornersMarker corners = new CornersMarker(fast, Color.White);

            // Apply the filter and display it on a picturebox
            pictureBox1.Image = corners.Apply(lenna);
        }
        public void batch_test()
        {
            Bitmap[] images =
            {
                Accord.Imaging.Image.Clone(Resources.flower01),
                Accord.Imaging.Image.Clone(Resources.flower02),
                Accord.Imaging.Image.Clone(Resources.flower03),
                Accord.Imaging.Image.Clone(Resources.flower04),
                Accord.Imaging.Image.Clone(Resources.flower05),
                Accord.Imaging.Image.Clone(Resources.flower06),
            };

            FastCornersDetector target = new FastCornersDetector();

            for (int i = 0; i < images.Length; i++)
            {
                List <IntPoint> actual = target.ProcessImage(images[i]);
                Assert.IsNotNull(actual);
            }
        }
Example #11
        public void SerializeTest2()
        {
            var images = BagOfVisualWordsTest.images.DeepClone();

            Accord.Math.Tools.SetupGenerator(0);

            FastCornersDetector fast = new FastCornersDetector();

            FastRetinaKeypointDetector freak = new FastRetinaKeypointDetector(fast);

            var kmodes = new KModes <byte>(5, Distance.BitwiseHamming);

            var bow = new BagOfVisualWords <FastRetinaKeypoint, byte[]>(freak, kmodes);

            bow.Compute(images);


            double[][] expected = new double[images.Length][];
            for (int i = 0; i < expected.Length; i++)
            {
                expected[i] = bow.GetFeatureVector(images[i]);
            }

            MemoryStream    stream = new MemoryStream();
            BinaryFormatter fmt    = new BinaryFormatter();

            fmt.Serialize(stream, bow);
            stream.Seek(0, SeekOrigin.Begin);
            bow = (BagOfVisualWords <FastRetinaKeypoint, byte[]>)fmt.Deserialize(stream);

            double[][] actual = new double[expected.Length][];
            for (int i = 0; i < actual.Length; i++)
            {
                actual[i] = bow.GetFeatureVector(images[i]);
            }


            Assert.IsTrue(expected.IsEqual(actual));
        }
Example #12
        public void SerializeTest2()
        {
            var images = GetImages();

            Accord.Math.Random.Generator.Seed = 0;

            FastCornersDetector fast = new FastCornersDetector();

            FastRetinaKeypointDetector freak = new FastRetinaKeypointDetector(fast);

            var kmodes = new KModes <byte>(5, new Hamming());

            kmodes.ParallelOptions.MaxDegreeOfParallelism = 1;

            var bow = new BagOfVisualWords <FastRetinaKeypoint, byte[]>(freak, kmodes);

            bow.Compute(images);

            double[][] expected = new double[images.Length][];
            for (int i = 0; i < expected.Length; i++)
            {
                expected[i] = bow.GetFeatureVector(images[i]);
            }

            MemoryStream    stream = new MemoryStream();
            BinaryFormatter fmt    = new BinaryFormatter();

            fmt.Serialize(stream, bow);
            stream.Seek(0, SeekOrigin.Begin);
            bow = (BagOfVisualWords <FastRetinaKeypoint, byte[]>)fmt.Deserialize(stream);

            double[][] actual = new double[expected.Length][];
            for (int i = 0; i < actual.Length; i++)
            {
                actual[i] = bow.GetFeatureVector(images[i]);
            }

            Assert.IsTrue(expected.IsEqual(actual));
        }
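        // A minimal sketch (not part of the original tests) of persisting the trained
        // bag-of-visual-words to disk with the same BinaryFormatter approach used in
        // the test above; the file name "bow.bin" is a placeholder. BinaryFormatter is
        // obsolete in recent .NET versions and is used here only to mirror the tests.
        public static void SaveBagOfWords(BagOfVisualWords <FastRetinaKeypoint, byte[]> bow)
        {
            var fmt = new BinaryFormatter();

            using (var file = new FileStream("bow.bin", FileMode.Create))
            {
                fmt.Serialize(file, bow);
            }
        }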
        public void doc_test()
        {
            string localPath = TestContext.CurrentContext.TestDirectory;

            #region doc_apply
            // Let's load an example image, such as Lena,
            // from a standard dataset of example images:
            var    images = new TestImages(path: localPath);
            Bitmap lena   = images["lena.bmp"];

            // Create FAST with the default parameter values:
            var fast = new FastCornersDetector(threshold: 20);

            // Use it to extract interest points from the Lena image:
            List <IntPoint> descriptors = fast.ProcessImage(lena);

            // Now those descriptors can be used to represent the image itself, for
            // example in the Bag-of-Visual-Words approach for classification (a
            // brief sketch follows this test).
            #endregion

            Assert.AreEqual(1144, descriptors.Count);
        }
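        // A short follow-up sketch (not part of the original test): the FAST corner
        // points can feed the Bag-of-Visual-Words pipeline shown in the serialization
        // tests above; "images" is assumed to be a Bitmap[] training set supplied by
        // the caller.
        public static double[][] BuildBowFeatures(Bitmap[] images)
        {
            var fast   = new FastCornersDetector(threshold: 20);
            var freak  = new FastRetinaKeypointDetector(fast);
            var kmodes = new KModes <byte>(5, new Hamming());

            var bow = new BagOfVisualWords <FastRetinaKeypoint, byte[]>(freak, kmodes);
            bow.Compute(images);

            // One fixed-length feature vector per training image:
            double[][] features = new double[images.Length][];
            for (int i = 0; i < images.Length; i++)
                features[i] = bow.GetFeatureVector(images[i]);

            return features;
        }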
        public void ProcessImageTest()
        {
            UnmanagedImage image = UnmanagedImage.FromManagedImage(Accord.Imaging.Image.Clone(Resources.sample_black));

            FastCornersDetector target = new FastCornersDetector();

            target.Suppress  = false;
            target.Threshold = 20;

            List <IntPoint> actual = target.ProcessImage(image);

            Assert.AreEqual(237, actual.Count);
            Assert.AreEqual(404, actual[0].X);
            Assert.AreEqual(35, actual[0].Y);
            Assert.AreEqual(407, actual[6].X);
            Assert.AreEqual(36, actual[6].Y);
            Assert.AreEqual(407, actual[11].X);
            Assert.AreEqual(38, actual[11].Y);
            Assert.AreEqual(55, actual[65].X);
            Assert.AreEqual(135, actual[65].Y);
            Assert.AreEqual(103, actual[73].X);
            Assert.AreEqual(137, actual[73].Y);
        }
        public void ProcessImageTest2()
        {
            UnmanagedImage image = UnmanagedImage.FromManagedImage(Accord.Imaging.Image.Clone(Resources.lena512));

            FastCornersDetector target = new FastCornersDetector();

            target.Suppress  = true;
            target.Threshold = 40;

            List <IntPoint> actual = target.ProcessImage(image);

            Assert.AreEqual(324, actual.Count);
            Assert.AreEqual(506, actual[0].X);
            Assert.AreEqual(4, actual[0].Y);
            Assert.AreEqual(152, actual[6].X);
            Assert.AreEqual(75, actual[6].Y);
            Assert.AreEqual(416, actual[11].X);
            Assert.AreEqual(115, actual[11].Y);
            Assert.AreEqual(140, actual[65].X);
            Assert.AreEqual(246, actual[65].Y);
            Assert.AreEqual(133, actual[73].X);
            Assert.AreEqual(253, actual[73].Y);
        }
Example #16
        public void MatchTest3()
        {
            Accord.Math.Random.Generator.Seed = 0;

            var old      = Accord.Imaging.Image.Clone(Resources.old);
            var flower01 = Accord.Imaging.Image.Clone(Resources.flower01);

            FastCornersDetector fast = new FastCornersDetector(threshold: 10);

            FastRetinaKeypointDetector freak = new FastRetinaKeypointDetector(fast);

            var keyPoints1 = freak.ProcessImage(old).ToArray();
            var keyPoints2 = freak.ProcessImage(flower01).ToArray();

            var matcher = new KNearestNeighborMatching <byte[]>(5, new Hamming());

            { // direct
                IntPoint[][] matches = matcher.Match(keyPoints1, keyPoints2);
                Assert.AreEqual(2, matches.Length);
                Assert.AreEqual(143, matches[0].Length);
                Assert.AreEqual(143, matches[1].Length);
                Assert.AreEqual(532, matches[0][0].X);
                Assert.AreEqual(159, matches[0][0].Y);
                Assert.AreEqual(keyPoints2[0].ToIntPoint(), matches[1][0]);
            }

            { // reverse
                IntPoint[][] matches = matcher.Match(keyPoints2, keyPoints1);
                Assert.AreEqual(2, matches.Length);
                Assert.AreEqual(143, matches[0].Length);
                Assert.AreEqual(143, matches[1].Length);
                Assert.AreEqual(keyPoints2[0].ToIntPoint(), matches[0][0]);
                Assert.AreEqual(532, matches[1][0].X);
                Assert.AreEqual(159, matches[1][0].Y);
            }
        }
        /// <summary>
        /// Takes the video and processes it two frames at a time to compute
        /// optical-flow features, then saves them to disk.
        /// </summary>
        /// <param name="vid">Path of the video on the disk.</param>
        /// <param name="save_path">Path to save the features on the disk.</param>
        public void Extract_Featurers2(String vid, String save_path)
        {
            int mm = 0;

            try
            {
                mag          = new Mat();
                ang          = new Mat();
                frame        = new Mat();
                prev_frame   = new Mat();
                cap          = new VideoCapture(vid);
                total_frames = Convert.ToInt32(cap.GetCaptureProperty(CapProp.FrameCount));
                F_L          = new List <int>();


                frame      = cap.QueryFrame();
                prev_frame = frame;

                Console.WriteLine(total_frames);
            }
            catch (NullReferenceException except)
            {
                Console.WriteLine(except.Message);
            }
            //17900
            while (mm < total_frames - 2)
            {
                try
                {
                    prev_frame = frame;
                    frame      = cap.QueryFrame();

                    Bitmap image = new Bitmap(frame.Bitmap);

                    // Create a new FAST Corners Detector
                    FastCornersDetector fast = new FastCornersDetector()
                    {
                        Suppress  = true, // suppress non-maximum points
                        Threshold = 70    // a lower threshold yields more corners
                    };

                    // Process the image looking for corners
                    List <IntPoint> points = fast.ProcessImage(image);

                    // Create a filter to mark the corners
                    PointsMarker marker = new PointsMarker(points);

                    // Apply the corner-marking filter
                    Bitmap markers = marker.Apply(image);

                    // Show on the screen
                    //Accord.Controls.ImageBox.Show(markers);

                    // Convert the detected corners into optical-flow seed points
                    // (reusing the list computed above instead of running FAST again):
                    PointF[] features = new PointF[points.Count];

                    int c = 0;
                    foreach (IntPoint p in points)
                    {
                        features[c] = new PointF(p.X, p.Y);
                        c++;
                    }

                    ImageViewer viewer = new ImageViewer();

                    Image <Gray, Byte> prev_grey_img = new Image <Gray, byte>(frame.Width, frame.Height);
                    Image <Gray, Byte> curr_grey_img = new Image <Gray, byte>(frame.Width, frame.Height);
                    curr_grey_img = frame.ToImage <Gray, byte>();
                    prev_grey_img = prev_frame.ToImage <Gray, Byte>();

                    PointF[] shiftedFeatures;
                    Byte[]   status;
                    float[]  trackErrors;

                    CvInvoke.CalcOpticalFlowPyrLK(prev_grey_img, curr_grey_img, features, new Size(9, 9), 3, new MCvTermCriteria(20, 0.05),
                                                  out shiftedFeatures, out status, out trackErrors);



                    //Image<Gray, Byte> displayImage = cap.QueryFrame().ToImage<Gray, Byte>();
                    //for (int i = 0; i < features.Length; i++)
                    //    displayImage.Draw(new LineSegment2DF(features[i], shiftedFeatures[i]), new Gray(), 2);


                    for (int i = 0; i < features.Length; i++)
                    {
                        CvInvoke.Circle(frame, System.Drawing.Point.Round(shiftedFeatures[i]), 4, new MCvScalar(0, 255, 255), 2);
                    }

                    int mean_X = 0;
                    int mean_Y = 0;

                    foreach (PointF p in shiftedFeatures)
                    {
                        mean_X += (int)p.X;
                        mean_Y += (int)p.Y;
                    }

                    mean_X /= shiftedFeatures.Length;
                    mean_Y /= shiftedFeatures.Length;

                    F_L.Add(mean_X);
                    F_L.Add(mean_Y);


                    //double[] inner = new double[] { mean_X, mean_Y };
                    //featuers_list[mm] = inner;

                    //viewer.Image = frame;
                    //viewer.ShowDialog();
                    //prev_frame = frame;

                    //Console.WriteLine("frame:{0} " + mm);
                    Console.WriteLine("frame:{0} " + mm + "  X:{1} " + mean_X + "   Y:{2} " + mean_Y);

                    mm++;
                }
                catch (Exception e)
                { Console.WriteLine(e.Message); }
            }
            //int go = 0;
            //foreach (double[] arr in featuers_list)
            //{
            //    Console.Write("frame:{0} ", go++);
            //    foreach (double d in arr)
            //        Console.Write(d + "    ");

            //    Console.WriteLine();
            //}
            Serialize.SerializeObject(F_L, save_path);
        }
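        // A hypothetical call sketch for Extract_Featurers2 above; both paths are
        // placeholders and the enclosing class instance is assumed.
        public void ExtractFeaturesExample()
        {
            Extract_Featurers2(@"C:\videos\sample.avi", @"C:\features\sample_flow.bin");
        }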
Example #18
        private void process1()
        {
            var bitmap1 = (Bitmap)sourcebox1.Image;
            var bitmap2 = (Bitmap)sourcebox2.Image;
            var hash1   = ImagePhash.ComputeDigest(bitmap1.ToLuminanceImage());
            var hash2   = ImagePhash.ComputeDigest(bitmap2.ToLuminanceImage());
            var score   = ImagePhash.GetCrossCorrelation(hash1, hash2);

            Console.WriteLine("score: {0}", score);

            //threshold value
            var thres = new Threshold(110);

            Grayscale filter = new Grayscale(0.2125, 0.7154, 0.0721);
            // apply the filter to the model
            Bitmap grey1 = filter.Apply(bitmap1);

            thres.ApplyInPlace(grey1);

            // Apply the filter to the observed image
            Bitmap grey2 = filter.Apply(bitmap2);

            thres.ApplyInPlace(grey2);

            int modelPoints = 0, matchingPoints = 0;

            var skewChecker     = new DocumentSkewChecker();
            var angle1          = skewChecker.GetSkewAngle(grey1);
            var rotationFilter1 = new RotateBicubic(-angle1);

            rotationFilter1.FillColor = Color.White;
            grey1 = rotationFilter1.Apply(grey1);

            var angle2          = skewChecker.GetSkewAngle(grey2);
            var rotationFilter2 = new RotateBicubic(-angle2);

            rotationFilter2.FillColor = Color.White;
            grey2 = rotationFilter2.Apply(grey2);

            //CorrelationMatching matcher = new CorrelationMatching(5, grey1, grey2);
            //var results = matcher.GetHashCode();
            var detector = new FastCornersDetector(15);
            var freak    = new FastRetinaKeypointDetector(detector);

            FastRetinaKeypoint[] features1 = freak.Transform(grey1).ToArray();
            modelPoints = features1.Count();

            Console.WriteLine("count: {0}", modelPoints);

            FastRetinaKeypoint[] features2 = freak.Transform(grey2).ToArray();

            Console.WriteLine("count: {0}", features2.Count());

            KNearestNeighborMatching matcher = new KNearestNeighborMatching(7);

            //var length = 0;

            IntPoint[][] results = matcher.Match(features1, features2);
            matchingPoints = results[0].Count(); // similarity of image1 to image2
            ////matchingPoints = results[1].Count(); // similarity of image2 to image1

            Console.WriteLine("matched points: {0}", matchingPoints);

            sourcebox1.Image = bitmap1;
            sourcebox2.Image = bitmap2;
            var marker1 = new FeaturesMarker(features1, 30);
            var marker2 = new FeaturesMarker(features2, 30);



            double similPercent = 0;

            if (matchingPoints > 0)
            {
                similPercent = (matchingPoints * 100d) / modelPoints;
            }

            Console.WriteLine("score: {0}", similPercent);

            simil1.Text = similPercent.ToString("##.##") + "%";
            simil2.Text = (score * 100.00d).ToString("##.##") + "%";

            angle_text.Text  = angle2.ToString("##.##") + "°";
            resultbox1.Image = marker1.Apply(grey1);
            resultbox2.Image = marker2.Apply(grey2);
        }