Example #1
        public IEnumerable<Mat> DescribeMany(IEnumerable<Mat> images, out KeyPoint[][] keypoints)
        {
            List<Mat> descriptors = new List<Mat>();

            // Detect keypoints in every image, then compute one descriptor Mat per image.
            // (An out parameter cannot be passed by ref, hence the keypoints2 temporary.)
            KeyPoint[][] keypoints2 = _featureDetector.Detect(images);
            _featureDetector.Compute(images, ref keypoints2, descriptors);
            keypoints = keypoints2;
            return descriptors;
        }
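A minimal usage sketch for the snippet above, assuming it is called from inside the same class and that _featureDetector is an OpenCvSharp Feature2D such as ORB.Create(); the file names are placeholders, and the ImreadModes casing varies between OpenCvSharp versions:

        var images = new List<Mat>
        {
            Cv2.ImRead("a.png", ImreadModes.Grayscale),
            Cv2.ImRead("b.png", ImreadModes.Grayscale)
        };
        KeyPoint[][] keypoints;
        var descriptors = DescribeMany(images, out keypoints); // one descriptor Mat per input image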
Example #2
        public static void ExtractFeatures(Mat image, out VectorOfKeyPoint keypoints, out Mat descriptors)
        {
            keypoints   = new VectorOfKeyPoint();
            descriptors = new Mat();

            using (var uModelImage = image.ToUMat(AccessType.Read))
            {
                // One-call alternative:
                //_featureDetector.DetectAndCompute(uModelImage, null, keypoints, descriptors, false);

                // Two-step version: detect keypoints first, then compute their
                // descriptors with a (possibly different) extractor.
                _featureDetector.DetectRaw(uModelImage, keypoints);
                _descriptorsComputer.Compute(uModelImage, keypoints, descriptors);
            }
        }
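A usage sketch for the helper above (Emgu CV; "box.png" is a placeholder file, and the exact Imread enum name differs between Emgu versions):

        Mat image = CvInvoke.Imread("box.png", ImreadModes.Grayscale);
        VectorOfKeyPoint keypoints;
        Mat descriptors;
        ExtractFeatures(image, out keypoints, out descriptors);
        Console.WriteLine($"{keypoints.Size} keypoints, {descriptors.Rows}x{descriptors.Cols} descriptor matrix");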
Example #3
        static object[] DetectAndCompute2(UMat img, Feature2D detector, Feature2D computer) // detects and computes the image descriptors
        {
            UMat descriptors = new UMat();

            // Detect keypoints, wrap them for the descriptor extractor, and compute.
            MKeyPoint[] mkp = detector.Detect(img);
            VectorOfKeyPoint keypoints = new VectorOfKeyPoint(mkp);
            computer.Compute(img, keypoints, descriptors);
            return new object[] { keypoints, descriptors };
        }
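Because the result comes back as an untyped object[], the caller has to cast the elements; a ValueTuple return type would be more idiomatic. A hypothetical caller:

        object[] result = DetectAndCompute2(img, detector, computer);
        var keypoints   = (VectorOfKeyPoint)result[0];
        var descriptors = (UMat)result[1];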
Example #4
        private static void BowTest()
        {
            DescriptorMatcher matcher   = new BFMatcher();
            Feature2D         extractor = AKAZE.Create();
            Feature2D         detector  = AKAZE.Create();

            TermCriteria              criteria               = new TermCriteria(CriteriaType.Count | CriteriaType.Eps, 10, 0.001);
            BOWKMeansTrainer          bowTrainer             = new BOWKMeansTrainer(200, criteria, 1);
            BOWImgDescriptorExtractor bowDescriptorExtractor = new BOWImgDescriptorExtractor(extractor, matcher);

            Mat img = null; // TODO: load a real training image here; Detect will throw on a null Mat

            KeyPoint[] keypoint = detector.Detect(img);
            Mat        features = new Mat();

            extractor.Compute(img, ref keypoint, features);
            bowTrainer.Add(features);

            throw new NotImplementedException();
        }
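For reference, a bag-of-words pipeline of this shape would typically continue by clustering the pooled features into the 200-word vocabulary declared above and handing it to the extractor; a sketch using the same OpenCvSharp objects:

        Mat vocabulary = bowTrainer.Cluster();            // k-means over all features added via Add()
        bowDescriptorExtractor.SetVocabulary(vocabulary); // extractor now maps an image to a 200-bin histogram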
Example #5
File: SURF.cs Project: okeanz/IPS
        public static double hessianThresh = 300; // SURF hessian threshold setting
        public static void DetectAndCompute(Mat img, out VectorOfKeyPoint keypoints, out Mat descriptors, Feature2D detector, Feature2D computer) // detects and computes the image descriptors
        {
            descriptors = new Mat();

            // Detect keypoints, then compute their descriptors.
            var mkp = detector.Detect(img, null);
            keypoints = new VectorOfKeyPoint(mkp);
            computer.Compute(img, keypoints, descriptors);
        }
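A usage sketch, assuming Emgu CV's SURF (Emgu.CV.XFeatures2D) serves as both detector and descriptor computer, and img is a previously loaded Mat:

        var surf = new SURF(hessianThresh);
        VectorOfKeyPoint keypoints;
        Mat descriptors;
        DetectAndCompute(img, out keypoints, out descriptors, surf, surf);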
Example #6
        public static bool TestFeature2DTracker(Feature2D keyPointDetector, Feature2D descriptorGenerator)
        {
            //for (int k = 0; k < 1; k++)
            {
                // When the detector and the descriptor generator are the same object,
                // use the single DetectAndCompute call below; otherwise run the two
                // stages separately.
                Feature2D feature2D = null;
                if (keyPointDetector == descriptorGenerator)
                {
                    feature2D = keyPointDetector;
                }

                Mat modelImage = EmguAssert.LoadMat("box.png");
                //Image<Gray, Byte> modelImage = new Image<Gray, byte>("stop.jpg");
                //modelImage = modelImage.Resize(400, 400, true);

                //modelImage._EqualizeHist();

                #region extract features from the object image
                Stopwatch        stopwatch      = Stopwatch.StartNew();
                VectorOfKeyPoint modelKeypoints = new VectorOfKeyPoint();
                Mat modelDescriptors            = new Mat();
                if (feature2D != null)
                {
                    feature2D.DetectAndCompute(modelImage, null, modelKeypoints, modelDescriptors, false);
                }
                else
                {
                    keyPointDetector.DetectRaw(modelImage, modelKeypoints);
                    descriptorGenerator.Compute(modelImage, modelKeypoints, modelDescriptors);
                }
                stopwatch.Stop();
                EmguAssert.WriteLine(String.Format("Time to extract feature from model: {0} milli-sec", stopwatch.ElapsedMilliseconds));
                #endregion

                //Image<Gray, Byte> observedImage = new Image<Gray, byte>("traffic.jpg");
                Image<Gray, Byte> observedImage = EmguAssert.LoadImage<Gray, byte>("box_in_scene.png");
                //Image<Gray, Byte> observedImage = modelImage.Rotate(45, new Gray(0.0));
                //image = image.Resize(400, 400, true);

                //observedImage._EqualizeHist();
                #region extract features from the observed image
                stopwatch.Reset();
                stopwatch.Start();
                VectorOfKeyPoint observedKeypoints = new VectorOfKeyPoint();
                using (Mat observedDescriptors = new Mat())
                {
                    if (feature2D != null)
                    {
                        feature2D.DetectAndCompute(observedImage, null, observedKeypoints, observedDescriptors, false);
                    }
                    else
                    {
                        keyPointDetector.DetectRaw(observedImage, observedKeypoints);
                        descriptorGenerator.Compute(observedImage, observedKeypoints, observedDescriptors);
                    }

                    stopwatch.Stop();
                    EmguAssert.WriteLine(String.Format("Time to extract feature from image: {0} milli-sec", stopwatch.ElapsedMilliseconds));
                    #endregion

                    //Merge the object image and the observed image into one big image for display
                    Image<Gray, Byte> res = modelImage.ToImage<Gray, Byte>().ConcateVertical(observedImage);

                    Rectangle rect = new Rectangle(Point.Empty, modelImage.Size);
                    PointF[]  pts  = new PointF[] {
                        new PointF(rect.Left, rect.Bottom),
                        new PointF(rect.Right, rect.Bottom),
                        new PointF(rect.Right, rect.Top),
                        new PointF(rect.Left, rect.Top)
                    };

                    Mat homography = null;

                    stopwatch.Reset();
                    stopwatch.Start();

                    int          k  = 2;
                    DistanceType dt = modelDescriptors.Depth == CvEnum.DepthType.Cv8U ? DistanceType.Hamming : DistanceType.L2;
                    //using (Matrix<int> indices = new Matrix<int>(observedDescriptors.Rows, k))
                    //using (Matrix<float> dist = new Matrix<float>(observedDescriptors.Rows, k))
                    using (VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch())
                        using (BFMatcher matcher = new BFMatcher(dt))
                        {
                            //ParamDef[] parameterDefs = matcher.GetParams();
                            matcher.Add(modelDescriptors);
                            matcher.KnnMatch(observedDescriptors, matches, k, null);

                            Mat mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                            mask.SetTo(new MCvScalar(255));
                            //mask.SetValue(255);
                            Features2DToolbox.VoteForUniqueness(matches, 0.8, mask);

                            int nonZeroCount = CvInvoke.CountNonZero(mask);
                            if (nonZeroCount >= 4)
                            {
                                nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeypoints, observedKeypoints, matches, mask, 1.5, 20);
                                if (nonZeroCount >= 4)
                                {
                                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeypoints, observedKeypoints, matches, mask, 2);
                                }
                            }
                        }
                    stopwatch.Stop();
                    EmguAssert.WriteLine(String.Format("Time for feature matching: {0} milli-sec", stopwatch.ElapsedMilliseconds));

                    bool success = false;
                    if (homography != null)
                    {
                        PointF[] points = pts.Clone() as PointF[];
                        points = CvInvoke.PerspectiveTransform(points, homography);
                        //homography.ProjectPoints(points);

                        for (int i = 0; i < points.Length; i++)
                        {
                            points[i].Y += modelImage.Height;
                        }

                        res.DrawPolyline(
#if NETFX_CORE
                            Extensions.
#else
                            Array.
#endif
                            ConvertAll <PointF, Point>(points, Point.Round), true, new Gray(255.0), 5);

                        success = true;
                    }
                    //Emgu.CV.UI.ImageViewer.Show(res);
                    return success;
                }



                /*
                 * stopwatch.Reset(); stopwatch.Start();
                 * //set the initial region to be the whole image
                 * using (Image<Gray, Single> priorMask = new Image<Gray, float>(observedImage.Size))
                 * {
                 * priorMask.SetValue(1.0);
                 * homography = tracker.CamShiftTrack(
                 *    observedFeatures,
                 *    (RectangleF)observedImage.ROI,
                 *    priorMask);
                 * }
                 * Trace.WriteLine(String.Format("Time for feature tracking: {0} milli-sec", stopwatch.ElapsedMilliseconds));
                 *
                 * if (homography != null) //set the initial tracking window to be the whole image
                 * {
                 * PointF[] points = pts.Clone() as PointF[];
                 * homography.ProjectPoints(points);
                 *
                 * for (int i = 0; i < points.Length; i++)
                 *    points[i].Y += modelImage.Height;
                 * res.DrawPolyline(Array.ConvertAll<PointF, Point>(points, Point.Round), true, new Gray(255.0), 5);
                 * return true;
                 * }
                 * else
                 * {
                 * return false;
                 * }*/
            }
        }
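A hypothetical invocation, passing the same Emgu CV KAZE instance for both roles so that the single-call DetectAndCompute branch is exercised:

        Feature2D kaze = new KAZE();
        bool found = TestFeature2DTracker(kaze, kaze);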
Example #7
        private void CaptureOnImageGrabbed(object sender, EventArgs eventArgs)
        {
            var capture = (Capture)sender;

            var frame = new Mat();

            capture.Retrieve(frame);

            // 1. get key points
            var keyPoints = new VectorOfKeyPoint(_detector.Detect(frame));

            _tempCloudPoints.SetKeyFeatures(_selectedFrameIndex, keyPoints);

            // 2. get descriptors
            var descriptors = new Mat();

            _descripter.Compute(frame, keyPoints, descriptors);

            // draw keypoints
            var imageFrame = new Mat();

            Features2DToolbox.DrawKeypoints(frame, keyPoints, imageFrame, new Bgr(Color.DarkBlue),
                                            Features2DToolbox.KeypointDrawType.NotDrawSinglePoints);

            if (_selectedFrameIndex != 0)
            {
                var previousKeyPoints       = _tempCloudPoints.GetKeyFeatures(_selectedFrameIndex - 1);
                var previousKeyDescriptors  = _previousDescripters;

                const int    k = 2;
                const double uniquenessThreshold = 0.8;

                // 3. compute all matches with previous frame
                var matches = new VectorOfVectorOfDMatch();
                var matcher = GetNativeMatcher(SelectedMatcher);
                matcher.Add(previousKeyDescriptors);

                matcher.KnnMatch(descriptors, matches, k, null);

                var mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                mask.SetTo(new MCvScalar(255));
                Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);
                Features2DToolbox.VoteForSizeAndOrientation(previousKeyPoints, keyPoints,
                                                            matches, mask, 1.5, 20);
                Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(previousKeyPoints,
                                                                         keyPoints, matches, mask, 2);

                var managedMask = mask.GetData();

                // 4. separate good matches
                var currentKeys = keyPoints;

                for (int i = 0; i < matches.Size; i++)
                {
                    var match = matches[i][0];
                    // filter wrong matches (mask entries are 255 when kept, 0 when rejected)
                    if (managedMask[i] != 0)
                    {
                        var previousIndex = match.TrainIdx;
                        var currentIndex  = match.QueryIdx;

                        var previousPoint = previousKeyPoints[previousIndex].Point;
                        var currentPoint  = currentKeys[currentIndex].Point;

                        _tempCloudPoints.Unite(_selectedFrameIndex - 1, previousIndex,
                                               _selectedFrameIndex, currentIndex);

                        CvInvoke.Line(imageFrame,
                                      Point.Round(previousPoint),
                                      Point.Round(currentPoint),
                                      new Bgr(Color.Red).MCvScalar,
                                      2);
                    }
                }
            }

            _previousDescripters = descriptors;

            PreviewImageSource = imageFrame;

            _selectedFrameIndex++;
            RaisePropertyChanged("Progress");
            RaisePropertyChanged("ProgressText");
            if (_selectedFrameIndex == _framesCount)
            {
                GeneratingStates = FeatureGeneratingStates.Finished;
            }
        }
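Hooking the handler up follows the usual Emgu CV 3.x capture pattern; a sketch (camera index 0 and the event wiring are assumptions, and Capture was renamed VideoCapture in later Emgu versions):

        var capture = new Capture(0);                  // open the first camera
        capture.ImageGrabbed += CaptureOnImageGrabbed; // fires once per grabbed frame
        capture.Start();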