Example #1
        public static OdometerFrame GetOdometerFrame(Mat left, Mat right, Feature2D detector, Feature2D descriptor, DistanceType distanceType, double maxDistance, Image <Arthmetic, double> K, double takeBest = 1.0)
        {
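            // Detect keypoints, compute descriptors, and match them between the two input frames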
            var match = MatchImagePair.Match(left, right, detector, descriptor, distanceType, maxDistance);

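            // Keep only the best fraction of matched points (takeBest), assuming matches are ordered by quality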
            var lps = match.LeftPointsList.Take((int)(match.LeftPoints.Size * takeBest));
            var rps = match.RightPointsList.Take((int)(match.RightPoints.Size * takeBest));

            var lps_n = lps.ToList();
            var rps_n = rps.ToList();
            var H     = EstimateHomography(lps_n, rps_n, K);

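            // If the homography corresponds to a pure rotation, the baseline is (near) zero and translation cannot be recovered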
            if (IsPureRotation(H))
            {
                OdometerFrame odometerFrame = new OdometerFrame();
                odometerFrame.Rotation       = RotationConverter.MatrixToEulerXYZ(H);
                odometerFrame.RotationMatrix = RotationConverter.EulerXYZToMatrix(odometerFrame.Rotation);
                odometerFrame.MatK           = K;
                odometerFrame.Match          = match;
                odometerFrame.Translation    = new Image <Arthmetic, double>(1, 3);
                return(odometerFrame);
            }
            else
            {
                if (!FindTwoViewsMatrices(lps_n, rps_n, K, out var F, out var E, out var R, out var t, out var X))
                {
                    return(null);
                }

                OdometerFrame odometerFrame = new OdometerFrame();
                odometerFrame.Rotation       = RotationConverter.MatrixToEulerXYZ(R);
                odometerFrame.RotationMatrix = R;
                odometerFrame.MatK           = K;
                odometerFrame.Match          = match;

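                // Camera center C = -R^T * t; two-view scale is unknown, so normalize to |C| = 1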
                Image <Arthmetic, double> C = R.T().Multiply(t).Mul(-1);
                odometerFrame.Translation = C.Mul(1.0 / C.Norm);
                return(odometerFrame);
            }
        }
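
Below is a minimal, hypothetical usage sketch for GetOdometerFrame, not part of the original project. It assumes the caller sits in the same class, that the usual Emgu.CV usings (CvInvoke, ImreadModes, ORBDetector, DistanceType) are in scope, and that Arthmetic is the project's own channel type for Image<Arthmetic, double>; the file paths, intrinsics, detector choice, and thresholds are placeholders.

        // Hypothetical caller; paths, intrinsics, and the choice of ORB as both detector
        // and descriptor are illustrative assumptions only.
        public static void Example()
        {
            Mat left  = CvInvoke.Imread("frame_000.png", ImreadModes.Grayscale);
            Mat right = CvInvoke.Imread("frame_001.png", ImreadModes.Grayscale);

            var orb = new ORBDetector();

            // 3x3 intrinsic matrix K with placeholder focal lengths and principal point
            var K = new Image<Arthmetic, double>(3, 3);
            K.Data[0, 0, 0] = 700; K.Data[0, 2, 0] = 320;
            K.Data[1, 1, 0] = 700; K.Data[1, 2, 0] = 240;
            K.Data[2, 2, 0] = 1;

            // ORB descriptors are binary, so Hamming distance is used for matching
            OdometerFrame frame = GetOdometerFrame(left, right, orb, orb, DistanceType.Hamming, 30.0, K, takeBest: 0.8);
            if (frame != null)
            {
                Console.WriteLine("Rotation (Euler XYZ): " + frame.Rotation.Data[0, 0, 0] + ", "
                                  + frame.Rotation.Data[1, 0, 0] + ", " + frame.Rotation.Data[2, 0, 0]);
            }
        }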
Example #2
        public OdometerFrame NextFrame(int left, int right, Func <int, int, MatchingResult> matcher)
        {
            MatchingResult match23 = matcher(left, right);

            if (match23.Matches.Size < MinimumCorrespondencesNeeded)
            {
                // Track of points is lost, at least temporarily. Let's put handling the lost-track case out of scope for now.
                lastGoodMatch = null;
                isContinuous  = false;
                return(null);
            }

            OdometerFrame frame = new OdometerFrame()
            {
                MatK           = K,
                Match          = match23,
                Rotation       = new Image <Arthmetic, double>(1, 3),
                RotationMatrix = RotationConverter.EulerXYZToMatrix(new Image <Arthmetic, double>(1, 3)),
                Translation    = new Image <Arthmetic, double>(1, 3)
            };

            // 1) Determine whether the transformation between consecutive frames has a high enough baseline to be accurate

            // 1a) For now, let's determine it by checking whether a pure rotation alone fits the motion well enough
            var H = FindTransformation.EstimateHomography(match23.LeftPointsList, match23.RightPointsList, K);

            if (FindTransformation.IsPureRotation(H, RotationTreshold1, RotationTreshold2))
            {
                // 1b) If the baseline is too small, the transformation is described by rotation only:
                //     find the rotation and rotate all points in the current set
                isContinuous         = false;
                frame.Rotation       = RotationConverter.MatrixToEulerXYZ(H);
                frame.RotationMatrix = RotationConverter.EulerXYZToMatrix(frame.Rotation);

                if (last3dPoints != null && R12 != null)
                {
                    last3dPoints = Utils.PutRTo4x4(frame.RotationMatrix).Multiply(last3dPoints);
                    R12          = frame.RotationMatrix.Multiply(R12);
                }
                else
                {
                    R12 = frame.RotationMatrix;
                }

                // 1c) Skip frame and wait for next one (but save matches)
                return(frame);
            }

            // 2) We have legit frames
            if (!FindTransformation.FindTwoViewsMatrices(match23.LeftPointsList, match23.RightPointsList, K,
                                                         out var F23, out var E23, out var R23, out var t23, out var X23))
            {
                // 3a) Or not
                isContinuous = false;
                return(null);
            }

            frame.Rotation       = RotationConverter.MatrixToEulerXYZ(R23);
            frame.RotationMatrix = R23;
            frame.Translation    = t23;

            // 3) Find the same points between the old frame and the current one
            if (lastGoodMatch == null)
            {
                last3dPoints = X23;
                isContinuous = true;
            }
            else
            {
                #region NonContinuousCase
                //if (!isContinuous) // This doesn't work well. Let's put it out of scope and just reset the scale
                // {
                // Find correspondences between last right and new left
                //var match12 = lastGoodMatch;
                //var match34 = match23;
                //var match23_ = matcher(lastGoodRightImage, left); // TODO: make use of already found feature points

                //var correspondences23to34 = Correspondences.FindCorrespondences12to23(match23_, match34);

                //// Now extend each correspondence to 4 points - find if point on 2 is matched to some point on 1
                //var correspondences13to34 = new List<Correspondences.MatchPair>();
                //foreach(var c in correspondences23to34)
                //{
                //    var m23 = c.Match12;
                //    for (int i = 0; i < match12.Matches.Size; ++i)
                //    {
                //        if(match12.Matches[i].TrainIdx == m23.QueryIdx)
                //        {
                //            correspondences13to34.Add(new Correspondences.MatchPair()
                //            {
                //                Kp1 = match12.LeftKps[match12.Matches[i].QueryIdx],
                //                Kp2 = c.Kp2,
                //                Kp3 = c.Kp3
                //            });
                //        }
                //    }
                //}

                //if (correspondences13to34.Count >= MinimumCorrespondencesNeeded)
                //{
                //    var t13 = R12.Multiply(c12).Mul(-1);

                //    FindBestScale(R12, t13, R23, t23, K, correspondences13to34, MinimumCorrespondencesNeeded, out double scale, out double confidence, out List<int> inliers);

                //    t23 = t23.Mul(scale);
                //    frame.Translation = t23;

                //    FindTransformation.TriangulateChieral(match23.LeftPointsList, match23.RightPointsList, K, R23, t23, out last3dPoints);

                //    isContinuous = true;
                //}
                //else
                //{
                //    isContinuous = false;
                //}
                //  }
                #endregion
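                // Continuous track: recover the scale of t23 relative to t12 from correspondences shared across three consecutive frames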
                if (isContinuous)
                {
                    var correspondences = Correspondences.FindCorrespondences12to23(lastGoodMatch, match23);
                    if (correspondences.Count >= MinimumCorrespondencesNeeded)
                    {
                        // Normalize to |t| = 1
                        t12 = t12.Mul(1.0 / t12.Norm);
                        t23 = t23.Mul(1.0 / t23.Norm);

                        FindBestScale(R12, t12, R23, t23, K, correspondences, MinimumCorrespondencesNeeded, out double scale, out double confidence, out List <int> inliers);

                        t23 = t23.Mul(scale);
                        frame.Translation = t23;

                        FindTransformation.TriangulateChieral(match23.LeftPointsList, match23.RightPointsList, K, R23, t23, out last3dPoints);
                    }
                    else
                    {
                        isContinuous = false;
                    }
                }
            }

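            // Remember the current pair as the previous one for the next call; c12 = -R^T * t is the right camera's center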
            lastGoodMatch      = match23;
            lastGoodLeftImage  = left;
            lastGoodRightImage = right;
            R12 = R23;
            t12 = t23;
            c12 = R23.T().Multiply(t23).Mul(-1);

            return(frame);
        }
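
Below is a hypothetical driver loop for NextFrame, a sketch only: it assumes RunSequence is a member of the same class, that frames holds the image sequence addressed by the integer indices passed to NextFrame, and that MatchImagePair.Match, ORBDetector, and DistanceType behave as in Example #1; the detector choice and the max-distance threshold are placeholders.

        // Hypothetical driver; 'frames' stands in for however the surrounding class stores its image sequence.
        public void RunSequence(List<Mat> frames)
        {
            var orb = new ORBDetector();

            for (int i = 1; i < frames.Count; ++i)
            {
                // The matcher delegate maps frame indices to a MatchingResult, reusing MatchImagePair as in Example #1
                OdometerFrame frame = NextFrame(i - 1, i,
                    (l, r) => MatchImagePair.Match(frames[l], frames[r], orb, orb, DistanceType.Hamming, 30.0));

                if (frame == null)
                {
                    // Too few correspondences or two-view estimation failed; the tracker has already reset its state
                    continue;
                }

                // When the track is continuous, frame.Translation is rescaled against the previous pair;
                // otherwise it is the raw two-view estimate with unknown scale.
            }
        }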