Example #1
        private void PrintMatricesInfo(Image <Arthmetic, double> E, Image <Arthmetic, double> K, Image <Arthmetic, double> R, Image <Arthmetic, double> t)
        {
            StringBuilder sb = new StringBuilder();

            Svd svd = new Svd(E);

            sb.AppendLine(string.Format("E s1 = {0}, s2 = {1}", svd.S[0, 0], svd.S[1, 0]));

            sb.AppendLine();
            sb.AppendLine(string.Format("fx = {0}, fy = {1}", K[0, 0], K[1, 1]));
            sb.AppendLine(string.Format("px = {0}, py = {1}", K[0, 2], K[1, 2]));

            sb.AppendLine();
            sb.AppendLine(string.Format("tx = {0} ", t[0, 0]));
            sb.AppendLine(string.Format("ty = {0}", t[1, 0]));
            sb.AppendLine(string.Format("tz = {0},", t[2, 0]));

            var r = RotationConverter.MatrixToEulerXYZ(R);

            sb.AppendLine();
            sb.AppendLine(string.Format("rx = {0} ", r[0, 0]));
            sb.AppendLine(string.Format("ry = {0}", r[1, 0]));
            sb.AppendLine(string.Format("rz = {0},", r[2, 0]));

            info.Text = sb.ToString();
        }
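
Every example in this listing funnels a 3×3 rotation matrix through RotationConverter.MatrixToEulerXYZ. The converter itself is not part of the listing and its exact angle convention is not shown; a minimal sketch of an XYZ-order decomposition, assuming R = Rx(rx)·Ry(ry)·Rz(rz) and plain double[,] storage instead of Emgu's Image<Arthmetic, double>, could look like this:

        // Hedged sketch only: assumes R = Rx(rx) * Ry(ry) * Rz(rz); the real
        // RotationConverter may use a different angle order or matrix storage.
        public static double[] MatrixToEulerXYZSketch(double[,] R)
        {
            // For R = Rx * Ry * Rz, the (0,2) entry equals sin(ry).
            double ry = Math.Asin(Math.Max(-1.0, Math.Min(1.0, R[0, 2])));
            double rx = Math.Atan2(-R[1, 2], R[2, 2]);
            double rz = Math.Atan2(-R[0, 1], R[0, 0]);
            return new[] { rx, ry, rz };
        }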
Example #2
        public static OdometerFrame GetOdometerFrame(Mat left, Mat right, Feature2D detector, Feature2D descriptor, DistanceType distanceType, double maxDistance, Image <Arthmetic, double> K, double takeBest = 1.0)
        {
            var match = MatchImagePair.Match(left, right, detector, descriptor, distanceType, maxDistance);

            var lps = match.LeftPointsList.Take((int)(match.LeftPoints.Size * takeBest));
            var rps = match.RightPointsList.Take((int)(match.RightPoints.Size * takeBest));

            var lps_n = lps.ToList();
            var rps_n = rps.ToList();
            var H     = EstimateHomography(lps_n, rps_n, K);

            if (IsPureRotation(H))
            {
                OdometerFrame odometerFrame = new OdometerFrame();
                odometerFrame.Rotation       = RotationConverter.MatrixToEulerXYZ(H);
                odometerFrame.RotationMatrix = RotationConverter.EulerXYZToMatrix(odometerFrame.Rotation);
                odometerFrame.MatK           = K;
                odometerFrame.Match          = match;
                odometerFrame.Translation    = new Image <Arthmetic, double>(1, 3);
                return(odometerFrame);
            }
            else
            {
                if (!FindTwoViewsMatrices(lps_n, rps_n, K, out var F, out var E, out var R, out var t, out var X))
                {
                    return(null);
                }

                OdometerFrame odometerFrame = new OdometerFrame();
                odometerFrame.Rotation       = RotationConverter.MatrixToEulerXYZ(R);
                odometerFrame.RotationMatrix = R;
                odometerFrame.MatK           = K;
                odometerFrame.Match          = match;

                Image <Arthmetic, double> C = R.T().Multiply(t).Mul(-1);
                odometerFrame.Translation = C.Mul(1.0 / C.Norm);
                return(odometerFrame);
            }
        }
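
The last two lines of the else branch recover the camera center from the decomposed pose: for a world-to-camera pose [R|t] the center is C = −Rᵀt, and since the scale of t is not observable from the essential matrix alone, the frame stores a unit-length direction. A small sketch of that step with plain arrays (rather than the Emgu Image type used above):

        // Sketch: camera center C = -R^T * t, normalized to unit length because
        // the translation from an essential matrix is only known up to scale.
        public static double[] CenterFromPose(double[,] R, double[] t)
        {
            var c = new double[3];
            for (int i = 0; i < 3; ++i)
                for (int j = 0; j < 3; ++j)
                    c[i] -= R[j, i] * t[j];   // -R^T * t
            double norm = Math.Sqrt(c[0] * c[0] + c[1] * c[1] + c[2] * c[2]);
            return new[] { c[0] / norm, c[1] / norm, c[2] / norm };
        }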
Example #3
        private static void DecomposeTransformationMatrices(Dataset dataset, TimeSpan interval)
        {
            DatasetFrame prev = null;

            foreach (var frame in dataset.Frames)
            {
                var translation = new Emgu.CV.Image <Arthmetic, double>(1, 3);
                for (int i = 0; i < 3; ++i)
                {
                    translation[i, 0] = frame.TransformationMatrix[i, 3];
                }

                var rotationMatrix = frame.TransformationMatrix.GetSubRect(new Rectangle(0, 0, 3, 3));
                var euler          = RotationConverter.MatrixToEulerXYZ(rotationMatrix);

                OdometerFrame odometry = new OdometerFrame()
                {
                    TimeDiff        = interval,
                    Translation     = translation,
                    Rotation        = euler,
                    Velocity        = null,
                    AngularVelocity = null,
                };

                if (prev != null)
                {
                    odometry.TranslationDiff = odometry.Translation - prev.Odometry.Translation;
                    odometry.RotationDiff    = odometry.Rotation - prev.Odometry.Rotation;
                }
                else
                {
                    odometry.TranslationDiff = new Emgu.CV.Image <Arthmetic, double>(1, 3);
                    odometry.RotationDiff    = new Emgu.CV.Image <Arthmetic, double>(1, 3);
                }

                frame.Odometry = odometry;
                prev           = frame;
            }
        }
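
RotationDiff above is an element-wise difference of Euler vectors, which is only an approximation for anything but small angles; Example #4 below instead forms the exact relative rotation R_rel = R_prevᵀ·R_cur before converting it to Euler angles. A sketch of that exact variant with plain arrays (the helper name is hypothetical):

        // Sketch: exact relative rotation between two frames, R_rel = R_prev^T * R_cur,
        // which can then be handed to a MatrixToEulerXYZ-style converter.
        public static double[,] RelativeRotation(double[,] Rprev, double[,] Rcur)
        {
            var Rrel = new double[3, 3];
            for (int i = 0; i < 3; ++i)
                for (int j = 0; j < 3; ++j)
                    for (int k = 0; k < 3; ++k)
                        Rrel[i, j] += Rprev[k, i] * Rcur[k, j];   // transpose(Rprev) * Rcur
            return Rrel;
        }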
Example #4
        private void UdpateFrame(int n)
        {
            if (Frames == null || n >= Frames.Count - Step || n < 0)
            {
                isRunning = false;
                nextFrameTimer.Stop();
                return;
            }

            Dispatcher.BeginInvoke((Action)(() =>
            {
                currentFrame = n;
                var frame = frames[n];
                var frame2 = frames[n + Step];

                var mat = CvInvoke.Imread(frame.ImageFile, Emgu.CV.CvEnum.ImreadModes.Color).ToImage <Bgr, byte>();
                var mat2 = CvInvoke.Imread(frame2.ImageFile, Emgu.CV.CvEnum.ImreadModes.Color).ToImage <Bgr, byte>();

                try
                {
                    double maxDistance = 20.0;
                    OdometerFrame odometerFrame = FindTransformation.GetOdometerFrame(mat.Mat, mat2.Mat, Detector, Descriptor, DistanceType, maxDistance, K);
                    if (odometerFrame != null)
                    {
                        videoViewer.Source = new BitmapImage(new Uri(frame.ImageFile, UriKind.Absolute));
                        recursive = true;
                        frameProgression.Value = n;
                        recursive = false;
                        frameCurrentLabel.Content = n;

                        totalRotation = odometerFrame.RotationMatrix.Multiply(totalRotation);
                        var rotationEuler = RotationConverter.MatrixToEulerXYZ(totalRotation);
                        totalTranslation = totalTranslation + odometerFrame.Translation;

                        var refTranslation = frame2.Odometry.Translation.Sub(frames[0].Odometry.Translation);
                        var refRotation = frames[0].Odometry.RotationMatrix.T().Multiply(frame2.Odometry.RotationMatrix);
                        var refRotationEuler = RotationConverter.MatrixToEulerXYZ(refRotation);

                        var refTranslationDiff = frame2.Odometry.Translation.Sub(frame.Odometry.Translation);
                        var refRotationDiff = frame.Odometry.RotationMatrix.T().Multiply(frame2.Odometry.RotationMatrix);
                        var refRotationDiffEuler = RotationConverter.MatrixToEulerXYZ(refRotationDiff);

                        infoReference.Text = FormatInfo(refTranslation, refRotationEuler, "Ref Cumulative");
                        infoReferenceDiff.Text = FormatInfo(refTranslationDiff, refRotationDiffEuler, "Ref Diff");
                        infoComputed.Text = FormatInfo(odometerFrame.Translation, odometerFrame.Rotation, "Comp Diff");
                        infoComputedCumulative.Text = FormatInfo(totalTranslation, rotationEuler, "Comp Cumulative");
                        infoK.Text = FormatInfoK(odometerFrame);

                        MatchDrawer.DrawFeatures(mat.Mat, mat2.Mat, odometerFrame.Match, TakeBest, matchedView);
                    }
                }
                catch (Exception e)
                {
                    infoComputed.Text = "Error!";
                }

                if (isRunning)
                {
                    nextFrameTimer.Start();
                }
            }));
        }
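
FormatInfo and FormatInfoK are not included in the listing. A hypothetical formatter consistent with how Example #1 prints translations and Euler rotations might look like the following (name and layout are assumptions, not the project's actual helper):

        // Hypothetical helper, not from the original source: prints a translation
        // vector and an Euler rotation in the same style as Example #1.
        private static string FormatInfoSketch(Image<Arthmetic, double> t, Image<Arthmetic, double> r, string label)
        {
            StringBuilder sb = new StringBuilder();
            sb.AppendLine(label);
            sb.AppendLine(string.Format("tx = {0}, ty = {1}, tz = {2}", t[0, 0], t[1, 0], t[2, 0]));
            sb.AppendLine(string.Format("rx = {0}, ry = {1}, rz = {2}", r[0, 0], r[1, 0], r[2, 0]));
            return sb.ToString();
        }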
Example #5
        public void ProcessImages(Mat left, Mat middle, Mat right, Feature2D detector, Feature2D descriptor, DistanceType distance)
        {
            double maxDistance = 20.0;
            var    match12     = MatchImagePair.Match(left, middle, detector, descriptor, distance, maxDistance);
            var    match23     = MatchImagePair.Match(middle, right, detector, descriptor, distance, maxDistance);
            var    match13     = MatchImagePair.Match(left, right, detector, descriptor, distance, maxDistance);

            TripletMatch tmatch = new TripletMatch();

            List <MDMatch> m12 = new List <MDMatch>();
            List <MDMatch> m23 = new List <MDMatch>();

            var left1    = match12.LeftPoints;
            var right1   = match12.RightPoints;
            var left2    = match23.LeftPoints;
            var left2_X  = MatchClosePoints.SortByX(match23.LeftPoints);
            var right2   = match23.RightPoints;
            var left3    = match13.LeftPoints;
            var right3   = match13.RightPoints;
            var right3_X = MatchClosePoints.SortByX(match13.LeftPoints);

            for (int idx12 = 0; idx12 < left1.Size; ++idx12)
            {
                var p1    = left1[idx12];
                var p2    = right1[idx12];
                int idx23 = IndexOf_X(left2_X, p2);
                if (idx23 != -1)
                {
                    var p3    = right2[idx23];
                    int idx13 = IndexOf_X(right3_X, p1);
                    if (idx13 != -1)
                    {
                        if (AreEqual(left1[idx12], left3[idx13]))
                        {
                            tmatch.Left.Add(p1);
                            tmatch.Middle.Add(p2);
                            tmatch.Right.Add(p3);

                            m12.Add(match12.Matches[idx12]);
                            m23.Add(match23.Matches[idx23]);
                        }
                    }
                }
            }

            match12.Matches = new VectorOfDMatch(m12.ToArray());
            match23.Matches = new VectorOfDMatch(m23.ToArray());

            MatchDrawer.DrawFeatures(left, right, match12, 1.0, bottomView);
            MatchDrawer.DrawFeatures(left, right, match23, 1.0, upperView);

            var F12 = ComputeMatrix.F(new VectorOfPointF(tmatch.Left.ToArray()), new VectorOfPointF(tmatch.Middle.ToArray()));
            var F23 = ComputeMatrix.F(new VectorOfPointF(tmatch.Middle.ToArray()), new VectorOfPointF(tmatch.Right.ToArray()));
            var F13 = ComputeMatrix.F(new VectorOfPointF(tmatch.Left.ToArray()), new VectorOfPointF(tmatch.Right.ToArray()));

            if (F12 == null || F23 == null || F13 == null)
            {
                info.Text = "Too few matches";
                return;
            }

            var Fs = new List <Image <Arthmetic, double> > {
                F12, F23, F13
            };

            var K = EstimateCameraFromImageSequence.K(Fs, left.Width, right.Height);

            var Es = new List <Image <Arthmetic, double> >
            {
                ComputeMatrix.E(F12, K),
                ComputeMatrix.E(F23, K),
                ComputeMatrix.E(F13, K)
            };

            FindTransformation.DecomposeToRTAndTriangulate(tmatch.Left, tmatch.Middle, K, Es[0],
                                                           out Image <Arthmetic, double> R12, out Image <Arthmetic, double> t12, out Image <Arthmetic, double> X12);
            FindTransformation.DecomposeToRTAndTriangulate(tmatch.Middle, tmatch.Right, K, Es[1],
                                                           out Image <Arthmetic, double> R23, out Image <Arthmetic, double> t23, out Image <Arthmetic, double> X23);
            FindTransformation.DecomposeToRTAndTriangulate(tmatch.Left, tmatch.Right, K, Es[2],
                                                           out Image <Arthmetic, double> R13, out Image <Arthmetic, double> t13, out Image <Arthmetic, double> X13);

            var Rs = new List <Image <Arthmetic, double> >
            {
                RotationConverter.MatrixToEulerXYZ(R12),
                RotationConverter.MatrixToEulerXYZ(R23),
                RotationConverter.MatrixToEulerXYZ(R13)
            };
            var ts = new List <Image <Arthmetic, double> >
            {
                t12,
                t23,
                t13
            };

            PrintMatricesInfo(Es, K, Rs, ts);
        }
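
ComputeMatrix.E above converts each fundamental matrix into an essential matrix using the camera intrinsics. Its implementation is not shown here, but the standard relation is E = Kᵀ·F·K; a self-contained sketch of that product with plain 3×3 arrays:

        // Sketch of the standard relation E = K^T * F * K (ComputeMatrix.E itself is not shown in this listing).
        public static double[,] EssentialFromFundamental(double[,] F, double[,] K)
        {
            double[,] Multiply(double[,] A, double[,] B)
            {
                var C = new double[3, 3];
                for (int i = 0; i < 3; ++i)
                    for (int j = 0; j < 3; ++j)
                        for (int k = 0; k < 3; ++k)
                            C[i, j] += A[i, k] * B[k, j];
                return C;
            }

            var Kt = new double[3, 3];
            for (int i = 0; i < 3; ++i)
                for (int j = 0; j < 3; ++j)
                    Kt[i, j] = K[j, i];   // K transposed

            return Multiply(Kt, Multiply(F, K));
        }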
Example #6
        private void UdpateFrame(int n)
        {
            if (Frames == null || n >= Frames.Count - Step || n < 0)
            {
                isRunning = false;
                nextFrameTimer.Stop();
                return;
            }

            Dispatcher.BeginInvoke((Action)(() =>
            {
                currentFrame = n;

                try
                {
                    var frame = frames[n];
                    var frame2 = frames[n + Step];

                    frame = Undistort(frame);
                    frame2 = Undistort(frame2);

                    var mat = frame.ToImage <Bgr, byte>();
                    var mat2 = frame2.ToImage <Bgr, byte>();

                    double maxDistance = MaxDistance(frame);

                    Func <int, int, MatchingResult> matcher = (i1, i2) =>
                    {
                        if (!features.TryGetValue(i1, out var features1))
                        {
                            MatchImagePair.FindFeatures(frames[i1], Detector, Descriptor, out MKeyPoint[] kps1, out Mat desc1);
                            features1 = new MatchingResult()
                            {
                                LeftKps = kps1,
                                LeftDescriptors = desc1
                            };
                            features[i1] = features1; // cache so the features are not recomputed on later calls
                        }

                        if (!features.TryGetValue(i2, out var features2))
                        {
                            MatchImagePair.FindFeatures(frames[i2], Detector, Descriptor, out MKeyPoint[] kps2, out Mat desc2);
                            features2 = new MatchingResult()
                            {
                                LeftKps = kps2,
                                LeftDescriptors = desc2
                            };
                            features[i2] = features2; // cache so the features are not recomputed on later calls
                        }

                        return(MatchImagePair.Match(features1.LeftKps, features1.LeftDescriptors, features2.LeftKps, features2.LeftDescriptors, DistanceType, maxDistance));
                    };

                    OdometerFrame odometerFrame = scaler.NextFrame(n, n + Step, matcher);
                    // OdometerFrame odometerFrame = FindTransformation.GetOdometerFrame(mat.Mat, mat2.Mat, Detector, Descriptor, DistanceType, maxDistance, K);
                    if (odometerFrame != null)
                    {
                        videoViewer.Source = ImageLoader.ImageSourceForBitmap(frame.Bitmap);
                        recursive = true;
                        frameProgression.Value = n;
                        recursive = false;
                        frameCurrentLabel.Content = n;

                        totalRotation = odometerFrame.RotationMatrix.Multiply(totalRotation);
                        var rotationEuler = RotationConverter.MatrixToEulerXYZ(totalRotation);
                        totalTranslation = totalTranslation + odometerFrame.Translation;

                        infoComputed.Text = FormatInfo(odometerFrame.Translation, odometerFrame.Rotation, "Comp Diff");
                        infoComputedCumulative.Text = FormatInfo(totalTranslation, rotationEuler, "Comp Cumulative");
                        infoK.Text = FormatInfoK(odometerFrame);

                        MatchDrawer.DrawFeatures(mat.Mat, mat2.Mat, odometerFrame.Match, TakeBest, matchedView);
                    }
                }
                catch (Exception e)
                {
                    infoComputed.Text = "Error!";
                }

                if (isRunning)
                {
                    nextFrameTimer.Start();
                }
            }));
        }
Example #7
        public static OdometerFrame GetOdometerFrame3(
            Mat left, Mat middle, Mat right, double lastScale, out double thisScale,
            Feature2D detector, Feature2D descriptor, DistanceType distanceType, double maxDistance,
            Image <Arthmetic, double> K, double takeBest = 1.0)
        {
            thisScale = lastScale;

            var match12 = MatchImagePair.Match(left, middle, detector, descriptor, distanceType, maxDistance);
            var match23 = MatchImagePair.Match(middle, right, detector, descriptor, distanceType, maxDistance);
            var match13 = MatchImagePair.Match(left, right, detector, descriptor, distanceType, maxDistance);

            var left1    = match12.LeftPoints;
            var right1   = match12.RightPoints;
            var left2    = match23.LeftPoints;
            var left2_X  = MatchClosePoints.SortByX(match23.LeftPoints);
            var right2   = match23.RightPoints;
            var left3    = match13.LeftPoints;
            var right3   = match13.RightPoints;
            var right3_X = MatchClosePoints.SortByX(match13.LeftPoints);

            TripletMatch tmatch = new TripletMatch();

            List <MDMatch> m12 = new List <MDMatch>();
            List <MDMatch> m23 = new List <MDMatch>();

            for (int idx12 = 0; idx12 < left1.Size; ++idx12)
            {
                var p1    = left1[idx12];
                var p2    = right1[idx12];
                int idx23 = IndexOf_X(left2_X, p2);
                if (idx23 != -1)
                {
                    var p3    = right2[idx23];
                    int idx13 = IndexOf_X(right3_X, p1);
                    if (idx13 != -1)
                    {
                        if (AreEqual(left1[idx12], left3[idx13], maxDistance))
                        {
                            tmatch.Left.Add(p1);
                            tmatch.Middle.Add(p2);
                            tmatch.Right.Add(p3);

                            m12.Add(match12.Matches[idx12]);
                            m23.Add(match23.Matches[idx23]);
                        }
                    }
                }
            }

            match12.Matches = new VectorOfDMatch(m12.ToArray());
            match23.Matches = new VectorOfDMatch(m23.ToArray());

            var F12 = ComputeMatrix.F(new VectorOfPointF(tmatch.Left.ToArray()), new VectorOfPointF(tmatch.Middle.ToArray()));
            //  var F23 = ComputeMatrix.F(new VectorOfPointF(tmatch.Middle.ToArray()), new VectorOfPointF(tmatch.Right.ToArray()));
            var F13 = ComputeMatrix.F(new VectorOfPointF(tmatch.Left.ToArray()), new VectorOfPointF(tmatch.Right.ToArray()));

            if (F12 == null || F13 == null)
            {
                return(null);
            }

            var Es = new List <Image <Arthmetic, double> >
            {
                ComputeMatrix.E(F12, K),
                //  ComputeMatrix.E(F23, K),
                ComputeMatrix.E(F13, K)
            };

            FindTransformation.DecomposeToRTAndTriangulate(tmatch.Left, tmatch.Middle, K, Es[0],
                                                           out Image <Arthmetic, double> R12, out Image <Arthmetic, double> t12, out Image <Arthmetic, double> X12);
            // FindTransformation.DecomposeToRT(Es[1], out Image<Arthmetic, double> R23, out Image<Arthmetic, double> t23);
            FindTransformation.DecomposeToRTAndTriangulate(tmatch.Left, tmatch.Right, K, Es[1],
                                                           out Image <Arthmetic, double> R13, out Image <Arthmetic, double> t13, out Image <Arthmetic, double> X13);

            var Rs = new List <Image <Arthmetic, double> >
            {
                R12,
                R13
            };
            var ts = new List <Image <Arthmetic, double> >
            {
                t12,
                t13
            };

            var cc = ComputeCameraCenter3(K, Rs, ts, tmatch);

            OdometerFrame odometerFrame = new OdometerFrame();

            odometerFrame.Rotation       = RotationConverter.MatrixToEulerXYZ(Rs[0]);
            odometerFrame.RotationMatrix = Rs[0];
            odometerFrame.MatK           = K;
            odometerFrame.Match          = match12;

            //    Image<Arthmetic, double> C = ComputeCameraCenter(R, t, K, match);
            //  odometerFrame.Translation = R.Multiply(C);
            //   odometerFrame.Translation = R.T().Multiply(odometerFrame.Translation);
            odometerFrame.Translation = ts[0].Mul(lastScale / ts[0].Norm);
            odometerFrame.Center      = lastScale * cc.C12;
            thisScale = cc.Ratio3To2;

            return(odometerFrame);
        }
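
Because the translation recovered from an essential matrix has arbitrary scale, the example rescales its unit direction by the scale carried over from the previous triplet and passes the new ratio on through thisScale (Ratio3To2 and C12 come from the project's ComputeCameraCenter3 and are not defined in this listing). A tiny sketch of the rescaling step itself:

        // Sketch: rescale a direction-only translation by the propagated scale,
        // mirroring ts[0].Mul(lastScale / ts[0].Norm) above.
        public static double[] RescaleTranslation(double[] t, double lastScale)
        {
            double norm = Math.Sqrt(t[0] * t[0] + t[1] * t[1] + t[2] * t[2]);
            return new[] { t[0] * lastScale / norm, t[1] * lastScale / norm, t[2] * lastScale / norm };
        }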
Example #8
        public OdometerFrame NextFrame(int left, int right, Func <int, int, MatchingResult> matcher)
        {
            MatchingResult match23 = matcher(left, right);

            if (match23.Matches.Size < MinimumCorrespondencesNeeded)
            {
                // Track of points is lost, at least temporarily. Handling the lost-track case is out of scope for now.
                lastGoodMatch = null;
                isContinuous  = false;
                return(null);
            }

            OdometerFrame frame = new OdometerFrame()
            {
                MatK           = K,
                Match          = match23,
                Rotation       = new Image <Arthmetic, double>(1, 3),
                RotationMatrix = RotationConverter.EulerXYZToMatrix(new Image <Arthmetic, double>(1, 3)),
                Translation    = new Image <Arthmetic, double>(1, 3)
            };

            // 1) Determine whether the transformation between consecutive frames has a high enough baseline to be accurate

            // 1a) For now, decide this by checking whether a rotation alone already explains the motion
            var H = FindTransformation.EstimateHomography(match23.LeftPointsList, match23.RightPointsList, K);

            if (FindTransformation.IsPureRotation(H, RotationTreshold1, RotationTreshold2))
            {
                // 1b) If it is, the transformation is described by rotation only:
                //     find the rotation and rotate all points in the current set
                isContinuous         = false;
                frame.Rotation       = RotationConverter.MatrixToEulerXYZ(H);
                frame.RotationMatrix = RotationConverter.EulerXYZToMatrix(frame.Rotation);

                if (last3dPoints != null && R12 != null)
                {
                    last3dPoints = Utils.PutRTo4x4(frame.RotationMatrix).Multiply(last3dPoints);
                    R12          = frame.RotationMatrix.Multiply(R12);
                }
                else
                {
                    R12 = frame.RotationMatrix;
                }

                // 1c) Skip frame and wait for next one (but save matches)
                return(frame);
            }

            // 2) The frames have enough baseline, so recover the two-view matrices
            if (!FindTransformation.FindTwoViewsMatrices(match23.LeftPointsList, match23.RightPointsList, K,
                                                         out var F23, out var E23, out var R23, out var t23, out var X23))
            {
                // 2a) ...or the estimation failed
                isContinuous = false;
                return(null);
            }

            frame.Rotation       = RotationConverter.MatrixToEulerXYZ(R23);
            frame.RotationMatrix = R23;
            frame.Translation    = t23;

            // 3) Find the same points between the previous frame pair and the current one
            if (lastGoodMatch == null)
            {
                last3dPoints = X23;
                isContinuous = true;
            }
            else
            {
                #region NonContinousCase
                //if (!isContinuous) // This doesn't work well. Let's put it out of scope and just reset the scale
                // {
                // Find correspondences between last right and new left
                //var match12 = lastGoodMatch;
                //var match34 = match23;
                //var match23_ = matcher(lastGoodRightImage, left); // TODO: make use of already found feature points

                //var correspondences23to34 = Correspondences.FindCorrespondences12to23(match23_, match34);

                //// Now extend each correspondence to 4 points - find if point on 2 is matched to some point on 1
                //var correspondences13to34 = new List<Correspondences.MatchPair>();
                //foreach(var c in correspondences23to34)
                //{
                //    var m23 = c.Match12;
                //    for (int i = 0; i < match12.Matches.Size; ++i)
                //    {
                //        if(match12.Matches[i].TrainIdx == m23.QueryIdx)
                //        {
                //            correspondences13to34.Add(new Correspondences.MatchPair()
                //            {
                //                Kp1 = match12.LeftKps[match12.Matches[i].QueryIdx],
                //                Kp2 = c.Kp2,
                //                Kp3 = c.Kp3
                //            });
                //        }
                //    }
                //}

                //if (correspondences13to34.Count >= MinimumCorrespondencesNeeded)
                //{
                //    var t13 = R12.Multiply(c12).Mul(-1);

                //    FindBestScale(R12, t13, R23, t23, K, correspondences13to34, MinimumCorrespondencesNeeded, out double scale, out double confidence, out List<int> inliers);

                //    t23 = t23.Mul(scale);
                //    frame.Translation = t23;

                //    FindTransformation.TriangulateChieral(match23.LeftPointsList, match23.RightPointsList, K, R23, t23, out last3dPoints);

                //    isContinuous = true;
                //}
                //else
                //{
                //    isContinuous = false;
                //}
                //  }
                #endregion
                if (isContinuous)
                {
                    var correspondences = Correspondences.FindCorrespondences12to23(lastGoodMatch, match23);
                    if (correspondences.Count >= MinimumCorrespondencesNeeded)
                    {
                        // Normalize to |t| = 1
                        t12 = t12.Mul(1.0 / t12.Norm);
                        t23 = t23.Mul(1.0 / t23.Norm);

                        FindBestScale(R12, t12, R23, t23, K, correspondences, MinimumCorrespondencesNeeded, out double scale, out double confidence, out List <int> inliers);

                        t23 = t23.Mul(scale);
                        frame.Translation = t23;

                        FindTransformation.TriangulateChieral(match23.LeftPointsList, match23.RightPointsList, K, R23, t23, out last3dPoints);
                    }
                    else
                    {
                        isContinuous = false;
                    }
                }
            }

            lastGoodMatch      = match23;
            lastGoodLeftImage  = left;
            lastGoodRightImage = right;
            R12 = R23;
            t12 = t23;
            c12 = R23.T().Multiply(t23).Mul(-1);

            return(frame);
        }
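
NextFrame is meant to be driven one frame pair at a time with a matcher delegate, as Example #6 does via scaler.NextFrame(n, n + Step, matcher). A hedged usage sketch modeled on that example (field and method names such as scaler, frames, Detector, Descriptor and MaxDistance are taken from Example #6 and may differ in the real project):

        // Usage sketch, modeled on Example #6: feed consecutive frame indices and a
        // matcher delegate to NextFrame and consume the returned odometry frames.
        private void RunOdometry(int step)
        {
            Func<int, int, MatchingResult> matcher = (i1, i2) =>
            {
                MatchImagePair.FindFeatures(frames[i1], Detector, Descriptor, out MKeyPoint[] kps1, out Mat desc1);
                MatchImagePair.FindFeatures(frames[i2], Detector, Descriptor, out MKeyPoint[] kps2, out Mat desc2);
                return MatchImagePair.Match(kps1, desc1, kps2, desc2, DistanceType, MaxDistance(frames[i1]));
            };

            for (int n = 0; n + step < frames.Count; n += step)
            {
                OdometerFrame frame = scaler.NextFrame(n, n + step, matcher);
                if (frame == null)
                {
                    continue;   // too few correspondences or two-view estimation failed
                }
                // e.g. accumulate frame.RotationMatrix and frame.Translation as Example #6 does
            }
        }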