Example #1
        private void TrackFeatures(Image<Gray, byte> grayImage)
        {
            if (framesProcessed == 1)
            {
                // First frame: detect an initial set of key points and allocate the
                // tracking buffers with the same size.
                keyPoints   = featureDetector.Detect(grayImage);
                kpVector    = new VectorOfPointF(keyPoints.Select(p => p.Point).ToArray());
                nextVector  = new VectorOfPointF(kpVector.Size);
                statusArray = new VectorOfByte(kpVector.Size);
                errArray    = new VectorOfFloat(kpVector.Size);
            }
            else if (framesProcessed > 2)
            {
                // From the third frame on, continue tracking from the points computed
                // on the previous frame (frame 2 still uses the initial detections).
                kpVector = nextVector;
            }

            // Every 50 frames, reseed the tracked points from a regular grid over the
            // current image to replace points that have drifted or been lost.
            if (framesProcessed % 50 == 0)
            {
                kpVector = CreateGrid(currentImage);
            }

            if (framesProcessed >= 2)
            {
                // Pyramidal Lucas-Kanade optical flow between the previous and current
                // frame; window size, pyramid depth and termination criteria come from
                // the UI track bars.
                CvInvoke.CalcOpticalFlowPyrLK(
                    lastGray, grayImage, kpVector, nextVector, statusArray, errArray,
                    new Size(trackBar1.Value * 2 + 2, trackBar1.Value * 2 + 2),
                    trackBar4.Value,
                    new MCvTermCriteria(trackBar2.Value, trackBar3.Value / 100.0));
                DrawPoints(nextVector, Color.Blue);
            }
        }
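
The example draws every point in nextVector, even points the tracker lost. A minimal follow-up sketch, assuming the same Emgu CV types and fields as above (statusArray entries are non-zero only where the flow was found), for keeping just the successfully tracked points:

        private VectorOfPointF KeepTrackedPoints(VectorOfPointF tracked, VectorOfByte status)
        {
            var kept = new List<PointF>();          // requires System.Collections.Generic / System.Drawing
            for (int i = 0; i < tracked.Size; i++)
            {
                if (status[i] != 0)                 // non-zero status = point was tracked
                {
                    kept.Add(tracked[i]);
                }
            }
            return new VectorOfPointF(kept.ToArray());
        }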
Example #2
        public IEnumerable<Mat> DescribeMany(IEnumerable<Mat> images, out KeyPoint[][] keypoints)
        {
            List<Mat> descriptors = new List<Mat>();

            // Detect key points on every image, then compute one descriptor Mat per image.
            // A local array is used because an out parameter cannot be passed by ref.
            KeyPoint[][] keypoints2 = _featureDetector.Detect(images);
            _featureDetector.Compute(images, ref keypoints2, descriptors);
            keypoints = keypoints2;
            return descriptors;
        }
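
A hedged usage sketch, assuming an OpenCvSharp-style _featureDetector (for example ORB.Create()) and that the file names below exist; none of this is part of the original example:

        var images = new[]
        {
            Cv2.ImRead("frame0.png", ImreadModes.Grayscale),
            Cv2.ImRead("frame1.png", ImreadModes.Grayscale)
        };
        var descriptorMats = DescribeMany(images, out KeyPoint[][] allKeypoints);
        // each returned Mat holds one descriptor row per key point in the matching allKeypoints entry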
Example #3
        // Detects key points and computes their descriptors for a single image.
        static object[] DetectAndCompute2(UMat img, Feature2D detector, Feature2D computer)
        {
            UMat descriptors = new UMat();

            MKeyPoint[]      mkp       = detector.Detect(img);
            VectorOfKeyPoint keypoints = new VectorOfKeyPoint(mkp);
            computer.Compute(img, keypoints, descriptors);

            // Packed as an untyped pair: [0] = VectorOfKeyPoint, [1] = UMat of descriptors.
            return new object[] { keypoints, descriptors };
        }
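
A hedged note, not part of the original: the caller has to cast the untyped pair back. Assuming an already-loaded UMat img and any Feature2D detector (for example AKAZE), the unpacking looks roughly like:

        object[] result      = DetectAndCompute2(img, detector, detector);
        var      keypoints   = (VectorOfKeyPoint)result[0]; // element 0: key points
        var      descriptors = (UMat)result[1];             // element 1: descriptor matrix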
Example #4
        private static void BowTest()
        {
            DescriptorMatcher matcher   = new BFMatcher();
            Feature2D         extractor = AKAZE.Create();
            Feature2D         detector  = AKAZE.Create();

            // k-means BoW trainer that will build a 200-word visual vocabulary.
            TermCriteria              criteria               = new TermCriteria(CriteriaType.Count | CriteriaType.Eps, 10, 0.001);
            BOWKMeansTrainer          bowTrainer             = new BOWKMeansTrainer(200, criteria, 1);
            BOWImgDescriptorExtractor bowDescriptorExtractor = new BOWImgDescriptorExtractor(extractor, matcher);

            Mat img = null; // placeholder: a real training image has to be loaded here

            KeyPoint[] keypoint = detector.Detect(img);
            Mat        features = new Mat();

            extractor.Compute(img, ref keypoint, features);
            bowTrainer.Add(features);

            throw new NotImplementedException();
        }
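
The example stops (NotImplementedException) before the vocabulary is built. A hedged sketch of the usual continuation, reusing the names above; the exact overloads should be checked against the installed OpenCvSharp version:

            // Cluster all accumulated descriptors into the 200-word vocabulary and hand
            // it to the BoW extractor, which can then turn an image's key points into a
            // fixed-length histogram over those visual words.
            Mat vocabulary = bowTrainer.Cluster();
            bowDescriptorExtractor.SetVocabulary(vocabulary);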
Example #5
        public static double hessianThresh = 300; // SURF tuning parameter (Hessian threshold)
        // Detects key points and computes their descriptors for a single image.
        public static void DetectAndCompute(Mat img, out VectorOfKeyPoint keypoints, out Mat descriptors, Feature2D detector, Feature2D computer)
        {
            keypoints   = null;
            descriptors = new Mat();
            try
            {
                var mkp = detector.Detect(img, null);
                keypoints = new VectorOfKeyPoint(mkp);
            }
            catch (Exception)
            {
                throw; // rethrow without resetting the stack trace
            }

            try
            {
                computer.Compute(img, keypoints, descriptors);
            }
            catch (Exception)
            {
                throw; // rethrow without resetting the stack trace
            }
        }
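
A hedged usage sketch, assuming Emgu's SURF from Emgu.CV.XFeatures2D and an already-loaded Mat img; the hessianThresh field above is used as the detector threshold:

            var surf = new SURF(hessianThresh);
            DetectAndCompute(img, out VectorOfKeyPoint kps, out Mat desc, surf, surf);
            // desc now has one descriptor row per key point in kps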
Example #6
        private void CaptureOnImageGrabbed(object sender, EventArgs eventArgs)
        {
            var capture = (Capture)sender;

            var frame = new Mat();

            capture.Retrieve(frame);

            // 1. get key points
            var keyPoints = new VectorOfKeyPoint(_detector.Detect(frame));

            _tempCloudPoints.SetKeyFeatures(_selectedFrameIndex, keyPoints);

            // 2. get descriptors
            var descripters = new Mat();

            _descripter.Compute(frame, keyPoints, descripters);

            // draw keypoints
            var imageFrame = new Mat();

            Features2DToolbox.DrawKeypoints(frame, keyPoints, imageFrame, new Bgr(Color.DarkBlue),
                                            Features2DToolbox.KeypointDrawType.NotDrawSinglePoints);

            if (_selectedFrameIndex != 0)
            {
                var previousKeyPoints      = _tempCloudPoints.GetKeyFeatures(_selectedFrameIndex - 1);
                var previousKeyDescripters = _previousDescripters;

                const int    k = 2;
                const double uniquenessThreshold = 0.8;

                // 3. compute all matches with previous frame
                var matches = new VectorOfVectorOfDMatch();
                var matcher = GetNativeMatcher(SelectedMatcher);
                matcher.Add(previousKeyDescripters);

                matcher.KnnMatch(descripters, matches, k, null);

                // Mask starts as all 255 (every match allowed); the voting steps below
                // zero out the entries for matches that fail each test.
                var mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                mask.SetTo(new MCvScalar(255));
                Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);
                Features2DToolbox.VoteForSizeAndOrientation(previousKeyPoints, keyPoints,
                                                            matches, mask, 1.5, 20);
                Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(previousKeyPoints,
                                                                         keyPoints, matches, mask, 2);

                var managedMask = mask.GetData();

                // 4. separate good matches
                var currentKeys = keyPoints;

                for (int i = 0; i < matches.Size; i++)
                {
                    var match = matches[i][0]; // best of the k nearest-neighbour candidates
                    // filter wrong matches: keep only entries that survived the mask voting
                    if (managedMask[i] != 0)
                    {
                        // TrainIdx indexes the previous frame's key points (the descriptors
                        // added to the matcher); QueryIdx indexes the current frame's.
                        var previousIndex = match.TrainIdx;
                        var currentIndex  = match.QueryIdx;

                        var previousPoint = previousKeyPoints[previousIndex].Point;
                        var currentPoint  = currentKeys[currentIndex].Point;

                        _tempCloudPoints.Unite(_selectedFrameIndex - 1, previousIndex,
                                               _selectedFrameIndex, currentIndex);

                        CvInvoke.Line(imageFrame,
                                      Point.Round(previousPoint),
                                      Point.Round(currentPoint),
                                      new Bgr(Color.Red).MCvScalar,
                                      2);
                    }
                }
            }

            _previousDescripters = descripters;

            PreviewImageSource = imageFrame;

            _selectedFrameIndex++;
            RaisePropertyChanged("Progress");
            RaisePropertyChanged("ProgressText");
            if (_selectedFrameIndex == _framesCount)
            {
                GeneratingStates = FeatureGeneratingStates.Finished;
            }
        }
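
The GetNativeMatcher helper is not shown in the example. A hedged, hypothetical reconstruction of such a factory in Emgu CV terms (the MatcherType enum is an assumption; binary descriptors such as ORB/AKAZE would use DistanceType.Hamming instead of L2):

        private DescriptorMatcher GetNativeMatcher(MatcherType type)
        {
            switch (type)
            {
                case MatcherType.Flann:
                    // FLANN-based approximate matcher for float descriptors.
                    return new FlannBasedMatcher(new KdTreeIndexParams(), new SearchParams());
                default:
                    // Brute-force matcher; exact, but slower on large descriptor sets.
                    return new BFMatcher(DistanceType.L2);
            }
        }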