// Example 1
        /// <summary>
        /// Computes a horizontal disparity for each feature point by tracking it from the
        /// left image into the right image with pyramidal Lucas-Kanade optical flow.
        /// </summary>
        /// <param name="leftImg">Left grayscale image of the stereo pair.</param>
        /// <param name="rightImg">Right grayscale image of the stereo pair.</param>
        /// <param name="points">Feature points detected in the left image.</param>
        /// <param name="precalcDepthMap">Unused here; kept to match the GetDisparities signature.</param>
        /// <param name="parameters">Must be a <see cref="VisualOdometerDisparitiesParamsLK"/> instance.</param>
        /// <returns>
        /// One disparity per input point; -1 marks points the tracker lost or whose
        /// disparity came out negative (impossible for a rectified pair).
        /// </returns>
        public double[] GetDisparitiesLK(Image<Gray, byte> leftImg, Image<Gray, byte> rightImg, PointF[] points, Image<Gray, short> precalcDepthMap, VisualOdometerDisparitiesParams parameters)
        {
            var param = (VisualOdometerDisparitiesParamsLK)parameters;
            var res = new double[points.Length];

            Size winSize = param.WinSize;               // e.g. new Size(80, 80)
            int pyrLevel = param.PyrLevel;              // e.g. 4
            MCvTermCriteria pyrLkTerm = param.PyrLkTerm; // e.g. new MCvTermCriteria(100, 0.001)

            // Refine the feature locations to sub-pixel accuracy before tracking.
            var subCorners = new PointF[1][];
            subCorners[0] = points;
            leftImg.FindCornerSubPix(
                subCorners,
                new Size(11, 11),
                new Size(-1, -1),
                new MCvTermCriteria(30, 0.01));

            var leftCorners = subCorners[0];

            // PyrLK fills these via the out parameters; no need to pre-allocate them.
            PointF[] rightPoints;
            byte[] status;
            float[] error;

            OpticalFlow.PyrLK(
                leftImg,
                rightImg,
                leftCorners,
                winSize,
                pyrLevel,
                pyrLkTerm,
                out rightPoints,
                out status,
                out error);

            for (int i = 0; i < points.Length; ++i)
            {
                if (status[i] == 1)
                {
                    // For a rectified pair the left x must be >= the right x;
                    // a negative disparity therefore means a bad match.
                    var disp = leftCorners[i].X - rightPoints[i].X;
                    res[i] = disp < 0 ? -1 : disp;
                }
                else
                {
                    res[i] = -1; // feature lost by the tracker
                }
            }

            return res;
        }
// Example 2
        /// <summary>
        /// Reads a disparity for each feature point straight out of the pre-computed
        /// depth map. Points falling outside the left image bounds get -1.
        /// </summary>
        /// <param name="leftImg">Left image; only its dimensions are used for bounds checks.</param>
        /// <param name="rightImg">Unused; kept for signature parity with GetDisparitiesLK.</param>
        /// <param name="points">Feature points in left-image coordinates.</param>
        /// <param name="precalcDepthMap">Per-pixel disparity map indexed as [y, x].</param>
        /// <param name="parameters">Unused by this lookup-based implementation.</param>
        /// <returns>One disparity per input point; -1 for out-of-bounds points.</returns>
        public double[] GetDisparities(Image<Gray, byte> leftImg, Image<Gray, byte> rightImg, PointF[] points, Image<Gray, short> precalcDepthMap, VisualOdometerDisparitiesParams parameters)
        {
            // Alternative LK-based path (disabled):
            //return this.GetDisparitiesLK(leftImg, rightImg, points, precalcDepthMap, parameters);

            var disparities = new double[points.Count()];

            for (int idx = 0; idx < points.Count(); ++idx)
            {
                var pt = points[idx];
                bool insideImage =
                    pt.X >= 0 && pt.X < leftImg.Width &&
                    pt.Y >= 0 && pt.Y < leftImg.Height;

                disparities[idx] = insideImage
                    ? precalcDepthMap[(int)pt.Y, (int)pt.X].Intensity
                    : -1;
            }

            return disparities;
        }
// Example 3
        /// <summary>
        /// Estimates camera motion between two stereo frames: tracks features from the
        /// previous left image into the current one, reprojects the matched features to
        /// 3D using each frame's disparities, then recovers the rigid transform
        /// (rotation via SVD, translation via centroid difference).
        /// </summary>
        /// <param name="rotMatrArray">3x3 IMU rotation matrix as jagged array; only null-checked
        /// here — the IMU-based rotation path is currently disabled in favor of SVD.</param>
        /// <param name="prevFrame">Previous stereo frame container (raw frames + depth map).</param>
        /// <param name="currFrame">Current stereo frame container (raw frames + depth map).</param>
        /// <param name="cameraParams">Stereo calibration; Q is the reprojection matrix.</param>
        /// <param name="currFeaturesList">Out: tracked feature positions in the current frame.</param>
        /// <param name="prevFeaturesList">Out: matching feature positions in the previous frame.</param>
        /// <param name="resRotation">Out: estimated camera rotation (inverse of the object rotation).</param>
        /// <param name="featuresToTrackParams">Feature-detection parameters.</param>
        /// <param name="featuresOpticFlowParams">Optical-flow tracking parameters.</param>
        /// <param name="disparitiesParams">Disparity-lookup parameters.</param>
        /// <returns>The camera translation, or null when any required input is null
        /// (in which case all out parameters are null).</returns>
        public MCvPoint3D64f? GetTranslationAndRotation(
            double[][] rotMatrArray,
            OpticFlowFrameContainer prevFrame,
            OpticFlowFrameContainer currFrame,
            StereoCameraParams cameraParams,
            out List<PointF> currFeaturesList,
            out List<PointF> prevFeaturesList,
            out Matrix<double> resRotation,
            VisualOdometerFeaturesToTrackParams featuresToTrackParams,
            VisualOdometerFeaturesOpticFlowParams featuresOpticFlowParams,
            VisualOdometerDisparitiesParams disparitiesParams)
        {
            if (
                rotMatrArray == null ||
                prevFrame == null ||
                currFrame == null ||
                cameraParams == null)
            {
                prevFeaturesList = null;
                currFeaturesList = null;
                resRotation = null;
                return null;
            }

            // NOTE(review): these images are never disposed; consider 'using' if
            // Image<,> holds unmanaged buffers — confirm ownership before changing.
            var leftPrevGrayImg = new Image<Gray, byte>(prevFrame.StereoFrame.LeftRawFrame);
            var rightPrevGrayImg = new Image<Gray, byte>(prevFrame.StereoFrame.RightRawFrame);

            var leftCurrGrayImg = new Image<Gray, byte>(currFrame.StereoFrame.LeftRawFrame);
            var rightCurrGrayImg = new Image<Gray, byte>(currFrame.StereoFrame.RightRawFrame);

            // Detect features in the previous frame and track them into the current one.
            var prevFeatures = GetFeaturesToTrack(leftPrevGrayImg, featuresToTrackParams);
            PointF[] prevCorrFeatures;
            PointF[] currCorrFeatures;

            GetFeaturesOpticFlow(prevFeatures, leftPrevGrayImg, leftCurrGrayImg, out prevCorrFeatures, out currCorrFeatures, featuresOpticFlowParams);

            // Reproject the tracked features to 3D in each frame.
            var prevDisps = GetDisparities(leftPrevGrayImg, rightPrevGrayImg, prevCorrFeatures, prevFrame.DepthMapImg, disparitiesParams);
            var prev3dPoints = ReprojectTo3d(leftPrevGrayImg, prevDisps, prevCorrFeatures, cameraParams.Q);

            var currDisps = GetDisparities(leftCurrGrayImg, rightCurrGrayImg, currCorrFeatures, currFrame.DepthMapImg, disparitiesParams);
            var curr3dPoints = ReprojectTo3d(leftCurrGrayImg, currDisps, currCorrFeatures, cameraParams.Q);

            // Keep only feature pairs that reprojected successfully in BOTH frames.
            var prevAct3dPointsList = new List<Matrix<double>>();
            var currAct3dPointsList = new List<Matrix<double>>();

            for (int i = 0; i < prev3dPoints.Count(); ++i)
            {
                if (prev3dPoints[i] != null && curr3dPoints[i] != null)
                {
                    prevAct3dPointsList.Add(prev3dPoints[i]);
                    currAct3dPointsList.Add(curr3dPoints[i]);
                }
            }

            var actPrevPoints = prevAct3dPointsList.ToArray();
            var actCurrPoints = currAct3dPointsList.ToArray();
            var prevCentroid = GetCentroid(actPrevPoints);
            var currCentroid = GetCentroid(actCurrPoints);

            Matrix<double> objRotMatrix;
            Matrix<double> resCamRotMatrix;

            // Rotation via SVD of the matched point sets. (An IMU-based alternative —
            // inverting rotMatrArray — was removed with the dead code; rotMatrArray is
            // now only used for the null guard above.)
            var detX = GetSVDRotation(actPrevPoints, actCurrPoints, out objRotMatrix);
            if (detX < 0)
            {
                // A negative determinant means SVD produced a reflection, not a
                // rotation; fall back to identity rather than a mirrored transform.
                objRotMatrix.SetIdentity();
                Console.WriteLine("detX = {0}", detX);
            }
            resCamRotMatrix = Utils.CvHelper.InverseMatrix(objRotMatrix);

            // Translation: rotate the previous centroid into the current frame's
            // orientation, then take the centroid difference (object-space), and
            // finally express it in camera space.
            var rotPrevCentr = objRotMatrix.Mul(prevCentroid);

            var rawT = new Matrix<double>(rotPrevCentr.Size);
            rawT[0, 0] = currCentroid[0, 0] - rotPrevCentr[0, 0];
            rawT[1, 0] = currCentroid[1, 0] - rotPrevCentr[1, 0];
            rawT[2, 0] = currCentroid[2, 0] - rotPrevCentr[2, 0];

            var camT = resCamRotMatrix.Mul(rawT);

            prevFeaturesList = prevCorrFeatures.ToList();
            currFeaturesList = currCorrFeatures.ToList();
            resRotation = resCamRotMatrix;
            return new MCvPoint3D64f(camT[0, 0], camT[1, 0], camT[2, 0]);
        }