Example #1
 public PointF[] GetFeaturesToTrack(Image<Gray, byte> img, VisualOdometerFeaturesToTrackParams parameters)
 {
     var param = (VisualOdometerFeaturesToTrackParamsST)parameters;
     int maxFeaturesCount = param.MaxFeaturesCount; // e.g. 400
     double qualityLevel = param.QualityLevel;      // e.g. 0.01
     double minDistance = param.MinDistance;        // e.g. 1
     int blockSize = param.BlockSize;               // e.g. 10
     // GoodFeaturesToTrack returns one PointF[] per image channel; the image is single-channel (Gray), so take channel 0.
     var goodFeatures = img.GoodFeaturesToTrack(maxFeaturesCount, qualityLevel, minDistance, blockSize);
     return goodFeatures[0];
 }
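
A possible call site for the method above (a sketch only: the object initializer on VisualOdometerFeaturesToTrackParamsST, the odometer instance, and the image file name are assumptions, not part of the example; the values mirror the ones hinted at in the comments):

 // Hypothetical usage, assuming Emgu CV 2.x and settable properties on the params type.
 var gray = new Image<Gray, byte>("left_frame.png");
 var trackParams = new VisualOdometerFeaturesToTrackParamsST
 {
     MaxFeaturesCount = 400,
     QualityLevel = 0.01,
     MinDistance = 1,
     BlockSize = 10
 };
 PointF[] corners = odometer.GetFeaturesToTrack(gray, trackParams);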
Example #2
        public MCvPoint3D64f? GetTranslationAndRotation(
            double[][] rotMatrArray,
            OpticFlowFrameContainer prevFrame,
            OpticFlowFrameContainer currFrame,
            StereoCameraParams cameraParams,
            out List<PointF> currFeaturesList,
            out List<PointF> prevFeaturesList,
            out Matrix<double> resRotation,
            VisualOdometerFeaturesToTrackParams featuresToTrackParams,
            VisualOdometerFeaturesOpticFlowParams featuresOpticFlowParams,
            VisualOdometerDisparitiesParams disparitiesParams)
        {
            if (
                rotMatrArray == null ||
                prevFrame == null ||
                currFrame == null ||
                cameraParams == null)
            {
                prevFeaturesList = null;
                currFeaturesList = null;
                resRotation = null;
                return null;
            }

            var leftPrevGrayImg = new Image<Gray, byte>(prevFrame.StereoFrame.LeftRawFrame);
            var rightPrevGrayImg = new Image<Gray, byte>(prevFrame.StereoFrame.RightRawFrame);

            var leftCurrGrayImg = new Image<Gray, byte>(currFrame.StereoFrame.LeftRawFrame);
            var rightCurrGrayImg = new Image<Gray, byte>(currFrame.StereoFrame.RightRawFrame);

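            // Detect good features in the previous left frame and track them into the current
            // left frame with optical flow; the matched feature positions in both frames are
            // returned via the out parameters.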
            var prevFeatures = GetFeaturesToTrack(leftPrevGrayImg, featuresToTrackParams);
            PointF[] prevCorrFeatures;
            PointF[] currCorrFeatures;

            GetFeaturesOpticFlow(prevFeatures, leftPrevGrayImg, leftCurrGrayImg, out prevCorrFeatures, out currCorrFeatures, featuresOpticFlowParams);

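            // Compute disparities at the tracked feature locations and reproject them to
            // 3D points for both the previous and the current stereo frame, using the Q matrix.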
            var prevDisps = GetDisparities(leftPrevGrayImg, rightPrevGrayImg, prevCorrFeatures, prevFrame.DepthMapImg, disparitiesParams);
            var prev3dPoints = ReprojectTo3d(leftPrevGrayImg, prevDisps, prevCorrFeatures, cameraParams.Q);

            var currDisps = GetDisparities(leftCurrGrayImg, rightCurrGrayImg, currCorrFeatures, currFrame.DepthMapImg, disparitiesParams);
            var curr3dPoints = ReprojectTo3d(leftCurrGrayImg, currDisps, currCorrFeatures, cameraParams.Q);

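            // Keep only the correspondences that produced a valid 3D point in both frames.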
            var prevAct3dPointsList = new List<Matrix<double>>();
            var currAct3dPointsList = new List<Matrix<double>>();

            for (int i = 0; i < prev3dPoints.Count(); ++i)
            {
                if (prev3dPoints[i] != null && curr3dPoints[i] != null)
                {
                    prevAct3dPointsList.Add(prev3dPoints[i]);
                    currAct3dPointsList.Add(curr3dPoints[i]);
                }
            }

            //old reproject
            //var depthSize = prevFrame.DepthMapImg.Size;
            //var reprojPrevDepthMap = new Image<Gray, short>(prevFrame.DepthMapImg.Size);
            //var reprojCurrDepthMap = new Image<Gray, short>(currFrame.DepthMapImg.Size);

            //for (int i = 0; i < depthSize.Height; ++i)
            //{
            //    for (int j = 0; j < depthSize.Width; ++j)
            //    {
            //        reprojPrevDepthMap.Data[i, j, 0] = 0;
            //        //reprojPrevDepthMap.Data[i, j, 1] = 0;
            //        //reprojPrevDepthMap.Data[i, j, 2] = 0;

            //        reprojCurrDepthMap.Data[i, j, 0] = 0;
            //        //reprojCurrDepthMap.Data[i, j, 1] = 0;
            //        //reprojCurrDepthMap.Data[i, j, 2] = 0;
            //    }
            //}

            //var actPrevFreatures = new List<PointF>();
            //var actCurrFreatures = new List<PointF>();

            //for (int i = 0; i < prevFeatures.Count(); ++i)
            //{
            //    if (status[i] == 1)
            //    {
            //        actPrevFreatures.Add(prevFeatures[i]);
            //        actCurrFreatures.Add(currFeatures[i]);

            //        int xp = (int)prevFeatures[i].X;
            //        int yp = (int)prevFeatures[i].Y;

            //        int xc = (int)currFeatures[i].X;
            //        int yc = (int)currFeatures[i].Y;

            //        if (yp < reprojPrevDepthMap.Height && yp >=0 && xp < reprojPrevDepthMap.Width && xp >= 0)
            //        {
            //            if (yc < reprojCurrDepthMap.Height && yc >= 0 && xc < reprojCurrDepthMap.Width && xc >= 0)
            //            {
            //                reprojPrevDepthMap.Data[yp, xp, 0] = prevFrame.DepthMapImg.Data[yp, xp, 0];
            //                reprojCurrDepthMap.Data[yc, xc, 0] = currFrame.DepthMapImg.Data[yc, xc, 0];
            //            }
            //        }
            //        //reprojPrevDepthMap.Data[yp, xp, 1] = prevFrame.DepthMapImg.Data[yp, xp, 1];
            //        //reprojPrevDepthMap.Data[yp, xp, 2] = prevFrame.DepthMapImg.Data[yp, xp, 2];

            //        //reprojCurrDepthMap.Data[yc, xc, 1] = currFrame.DepthMapImg.Data[yc, xc, 1];
            //        //reprojCurrDepthMap.Data[yc, xc, 2] = currFrame.DepthMapImg.Data[yc, xc, 2];
            //    }
            //}

            //var prevPoints = PointCollection.ReprojectImageTo3D(reprojPrevDepthMap, cameraParams.Q);
            //var currPoints = PointCollection.ReprojectImageTo3D(reprojCurrDepthMap, cameraParams.Q);

            //var maxZ = prevPoints.Max(x => x.z);
            //var actPrevPoints = prevPoints.Where(x => x.z != maxZ).ToArray();
            //var actCurrPoints = currPoints.Where(x => x.z != maxZ).ToArray();
            //// old reproject end
            var actPrevPoints = prevAct3dPointsList.ToArray();
            var actCurrPoints = currAct3dPointsList.ToArray();
            var prevCentroid = GetCentroid(actPrevPoints);
            var currCentroid = GetCentroid(actCurrPoints);

            var rotMatrix = Utils.CvHelper.ArrayToMatrix(rotMatrArray, new Size(3, 3));
            Matrix<double> objRotMatrix;
            Matrix<double> resCamRotMatrix;
            // Rotation from the IMU (alternative path, currently disabled):
            //var objRotMatrix = Utils.CvHelper.InverseMatrix(rotMatrix);

            // Rotation estimated with SVD from the 3D point correspondences:

            // A negative determinant from GetSVDRotation indicates a reflection rather than
            // a proper rotation; fall back to the identity in that case.
            var detX = GetSVDRotation(actPrevPoints, actCurrPoints, out objRotMatrix);
            if (detX < 0)
            {
                objRotMatrix.SetIdentity();
                Console.WriteLine("detX = {0}", detX);
            }
            // The camera rotation is the inverse of the estimated scene (object) rotation.
            resCamRotMatrix = Utils.CvHelper.InverseMatrix(objRotMatrix);

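            // Rotate the previous centroid into the current frame; the translation is then the
            // offset between the current centroid and the rotated previous centroid.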
            var rotPrevCentr = objRotMatrix.Mul(prevCentroid);

            // raw translation
            var rawT = new Matrix<double>(rotPrevCentr.Size);
            rawT[0, 0] = currCentroid[0, 0] - rotPrevCentr[0, 0];
            rawT[1, 0] = currCentroid[1, 0] - rotPrevCentr[1, 0];
            rawT[2, 0] = currCentroid[2, 0] - rotPrevCentr[2, 0];

            //camera translation
            var camT = resCamRotMatrix.Mul(rawT);
            //var camT = rawT;

            var X = camT[0, 0];
            var Y = camT[1, 0];
            var Z = camT[2, 0];

            prevFeaturesList = prevCorrFeatures.ToList();
            currFeaturesList = currCorrFeatures.ToList();
            resRotation = resCamRotMatrix;
            return new MCvPoint3D64f(X, Y, Z);
        }
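
GetCentroid is not included in the snippet; a minimal sketch of what it could look like, assuming each reprojected point is a 3x1 Emgu CV Matrix<double> (consistent with the [row, 0] indexing above):

        // Hypothetical helper, not part of the original example: averages a set of
        // 3x1 point matrices into a single 3x1 centroid.
        public Matrix<double> GetCentroid(Matrix<double>[] points)
        {
            var centroid = new Matrix<double>(3, 1);
            centroid.SetZero();
            foreach (var p in points)
            {
                centroid[0, 0] += p[0, 0];
                centroid[1, 0] += p[1, 0];
                centroid[2, 0] += p[2, 0];
            }
            // Divide by the number of points to get the mean position.
            centroid[0, 0] /= points.Length;
            centroid[1, 0] /= points.Length;
            centroid[2, 0] /= points.Length;
            return centroid;
        }

With centroids of the two corresponding point sets, the code above recovers the raw translation as currCentroid - objRotMatrix * prevCentroid and then maps it into the camera frame with the inverse rotation.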