public void Camera_Pose_And_Triangulation_From_Fundamental()
{
    #region Case 1

    // Expected outputs captured from the reference C++ implementation:
    // P1 is the canonical camera [I|0], P2 the recovered second pose,
    // and the essential matrix between the two views.
    var expectedP1 = new Matrix<double>(new double[3, 4]
    {
        { 1, 0, 0, 0 },
        { 0, 1, 0, 0 },
        { 0, 0, 1, 0 }
    });
    var expectedP2 = new Matrix<double>(new double[3, 4]
    {
        { 0.9870124281645626, 0.03547270126454876, -0.1566785055892807, -0.0008631055249466995 },
        { -0.03193350943559628, 0.9991760497758493, 0.0250494017784099, -0.02589733540347029 },
        { 0.1574379802456221, -0.01972077633678765, 0.9873319468936744, -0.9996642351649142 }
    });
    var expectedEssential = new Matrix<double>(new double[3, 3]
    {
        { -0.4052652752518787, 12.45233689948151, -0.01348616168894345 },
        { -11.47229370932243, -0.4564241889679639, 1.831549257782737 },
        { 0.297551532077682, 0.001072849756781113, -0.04743653296950803 }
    });

    // The first camera pose is initialized to the 3x4 identity [I|0].
    var initialPose = new Matrix<double>(3, 4);
    initialPose.SetIdentity();

    CameraPoseAndTriangulationFromFundamentalResult actual =
        OpenCvUtilities.CameraPoseAndTriangulationFromFundamental(
            _calibration,
            Utils.GetKeyPointsVector(_trackedFeatures2),
            Utils.GetKeyPointsVector(_bootstrapKp),
            initialPose);

    // Reprojection-error range, then the recovered matrices, then the success flag.
    Assert.AreEqual(196.532, actual.Min, 0.1);
    Assert.AreEqual(428.096, actual.Max, 0.1);
    CollectionAssert.AreEqual(expectedEssential.Data, actual.Esential.Data, _matrixComparer);
    CollectionAssert.AreEqual(expectedP1.Data, actual.P1.Data, _matrixComparer);
    CollectionAssert.AreEqual(expectedP2.Data, actual.P2.Data, _matrixComparer);
    Assert.That(actual.Result, Is.True);

    #endregion
}
/// <summary>
/// Replays the feature-bootstrap tracking loop frame-by-frame (frames 41..95 of cube2.avi)
/// and compares every intermediate result (optical flow, homography filtering, triangulation,
/// PCA plane fit) against values dumped by the reference C++ implementation into text files.
/// </summary>
public void Bootstrap_Track_Logic_Test()
{
    var capture = new Capture($@"{TestCaseProjectPath}\Videos\cube2.avi");
    //var capture = new Capture(@"C:\Users\zakharov\Documents\Repos\Mine\Rc\src\RubiksCube.OpenCV.TestCase\Videos\cube2.avi");

    // Skip the first 40 frames so the loop below starts at frame 41,
    // matching the numbering of the C++ reference dumps.
    for (int i = 0; i < 40; i++)
    {
        capture.QueryFrame();
    }

    // Grab two consecutive frames and convert both to grayscale in place.
    var prevGray = capture.QueryFrame();
    CvInvoke.CvtColor(prevGray, prevGray, ColorConversion.Bgr2Gray);
    var currentGray = capture.QueryFrame();
    CvInvoke.CvtColor(currentGray, currentGray, ColorConversion.Bgr2Gray);

    // Detect ORB keypoints on the first frame; these are the bootstrap features.
    // trackedFeatures starts as a copy of them and is refined each iteration.
    var bootstrapKp = new VectorOfKeyPoint();
    new ORBDetector().DetectRaw(prevGray, bootstrapKp);
    var trackedFeatures = new VectorOfKeyPoint(bootstrapKp.ToArray());

    //-------------------------------------------------------------------------

    // Tolerance-based comparers (return 0 = equal) used by CollectionAssert below.
    var pointComparer = Comparer<PointF>.Create((p1, p2) => Math.Abs(p1.X - p2.X) < 0.0001f && Math.Abs(p1.Y - p2.Y) < 0.0001f ? 0 : 1);
    var point3DComparer = Comparer<MCvPoint3D32f>.Create((p1, p2) => Math.Abs(p1.X - p2.X) < 0.0001f && Math.Abs(p1.Y - p2.Y) < 0.0001f && Math.Abs(p1.Z - p2.Z) < 0.0001f ? 0 : 1);
    var matrixComparer = Comparer<double>.Create((x, y) => Math.Abs(x - y) < 0.0001f ? 0 : 1);

    for (int i = 41; i <= 95; i++)
    {
        // Load the per-frame reference data dumped by the C++ version.
        var bootstrapPointsBeforeOpticalFlowCplusPlus = GetPoints($"I = {i}txt - Bootstrap points before optical flow.txt");
        var trackedPointsBeforeOpticalFlowCplusPlus = GetPoints($"I = {i}txt - Tracked points before optical flow.txt");
        var bootstrapPointsAfterOpticalFlowCplusPlus = GetPoints($"I = {i}txt - Bootstrap points after optical flow.txt");
        var trackedPointsAfterOpticalFlowCplusPlus = GetPoints($"I = {i}txt - Tracked points after optical flow.txt");
        var bootstrapPointsAfterHomographyCplusPlus = GetPoints($"I = {i}txt - Bootstrap points after homography.txt");
        var trackedPointsAfterHomographyCplusPlus = GetPoints($"I = {i}txt - Tracked points after homography.txt");
        var homographyCplusPlus = Getmatrix3X3($"I = {i}txt - Homography.txt");
        var homographyMaskCplusPlus = GetByteVector($"I = {i}txt - Homography mask.txt");

        var corners = new VectorOfPointF();
        var status = new VectorOfByte();
        var errors = new VectorOfFloat();

        // Sanity-check state entering this frame against the C++ dump.
        CollectionAssert.AreEqual(Utils.GetPointsVector(bootstrapKp).ToArray(), bootstrapPointsBeforeOpticalFlowCplusPlus.ToArray(), pointComparer);
        CollectionAssert.AreEqual(Utils.GetPointsVector(trackedFeatures).ToArray(), trackedPointsBeforeOpticalFlowCplusPlus.ToArray(), pointComparer);

        // Track the features from the previous frame into the current one.
        CvInvoke.CalcOpticalFlowPyrLK(prevGray, currentGray, Utils.GetPointsVector(trackedFeatures), corners, status, errors, new Size(11, 11), 3, new MCvTermCriteria(30, 0.01));
        // The current frame becomes the previous one for the next iteration.
        currentGray.CopyTo(prevGray);

        // Require at least 80% of the features to have been tracked successfully.
        if (CvInvoke.CountNonZero(status) < status.Size * 0.8)
        {
            throw new Exception("Tracking failed.");
        }

        // Keep only the feature pairs whose optical-flow status byte is set.
        trackedFeatures = Utils.GetKeyPointsVector(corners);
        Utils.KeepVectorsByStatus(ref trackedFeatures, ref bootstrapKp, status);

        CollectionAssert.AreEqual(Utils.GetPointsVector(bootstrapKp).ToArray(), bootstrapPointsAfterOpticalFlowCplusPlus.ToArray(), pointComparer);
        CollectionAssert.AreEqual(Utils.GetPointsVector(trackedFeatures).ToArray(), trackedPointsAfterOpticalFlowCplusPlus.ToArray(), pointComparer);

        // Both vectors must stay in 1:1 correspondence after filtering.
        if (trackedFeatures.Size != bootstrapKp.Size)
        {
            const string error = "Tracked features vector size is not equal to bootstrapped one.";
            throw new Exception(error);
        }

        //verify features with a homography
        var inlierMask = new VectorOfByte();
        var homography = new Mat();
        if (trackedFeatures.Size > 4)
        {
            CvInvoke.FindHomography(Utils.GetPointsVector(trackedFeatures), Utils.GetPointsVector(bootstrapKp), homography, HomographyMethod.Ransac, 0.99, inlierMask);
        }

        var homographyMatrix = new Matrix<double>(homography.Rows, homography.Cols, homography.DataPointer);
        CollectionAssert.AreEqual(homographyMatrix.Data, homographyCplusPlus.Data, matrixComparer);

        int inliersNum = CvInvoke.CountNonZero(inlierMask);
        CollectionAssert.AreEqual(inlierMask.ToArray(), homographyMaskCplusPlus.ToArray());

        // Drop homography outliers; bail out entirely when too few features survive.
        if (inliersNum != trackedFeatures.Size && inliersNum >= 4 && !homography.IsEmpty)
        {
            Utils.KeepVectorsByStatus(ref trackedFeatures, ref bootstrapKp, inlierMask);
        }
        else if (inliersNum < 10)
        {
            throw new Exception("Not enough features survived homography.");
        }

        CollectionAssert.AreEqual(Utils.GetPointsVector(bootstrapKp).ToArray(), bootstrapPointsAfterHomographyCplusPlus.ToArray(), pointComparer);
        CollectionAssert.AreEqual(Utils.GetPointsVector(trackedFeatures).ToArray(), trackedPointsAfterHomographyCplusPlus.ToArray(), pointComparer);

        // Snapshots taken so both vectors can be restored if bootstrapping must continue.
        var bootstrapKpOrig = new VectorOfKeyPoint(bootstrapKp.ToArray());
        var trackedFeaturesOrig = new VectorOfKeyPoint(trackedFeatures.ToArray());

        //TODO: Compare all these to c++ version
        //Attempt at 3D reconstruction (triangulation) if conditions are right
        var rigidT = CvInvoke.EstimateRigidTransform(Utils.GetPointsVector(trackedFeatures).ToArray(), Utils.GetPointsVector(bootstrapKp).ToArray(), false);
        var matrix = new Matrix<double>(rigidT.Rows, rigidT.Cols, rigidT.DataPointer);

        // Norm of the translation column is used as the "enough camera motion" gate.
        if (CvInvoke.Norm(matrix.GetCol(2)) > 100)
        {
            var points3DCplusPlus = GetPoints3d($"I = {i}txt - 3d points.txt");
            var eigenvectorsCplusPlus = Getmatrix3X3($"I = {i}txt - eigenvectors.txt");
            var normalOfPlaneCplusPlus = GetDoubleArray($"I = {i}txt - normal of plane.txt");

            //camera motion is sufficient
            var p1Init = new Matrix<double>(3, 4);
            p1Init.SetIdentity();
            var result = OpenCvUtilities.CameraPoseAndTriangulationFromFundamental(_calibration, trackedFeatures, bootstrapKp, p1Init);

            // The routine also filters the feature vectors; adopt its output.
            trackedFeatures = result.FilteredTrackedFeaturesKp;
            bootstrapKp = result.FilteredBootstrapKp;

            if (result.Result)
            {
                double pToPlaneTrashCplusPlus = GetDouble($"I = {i}txt - p_to_plane_thresh.txt");
                int numInliersCplusPlus = GetInt($"I = {i}txt - num inliers.txt");
                var statusArrCplusPlus = GetByteVector($"I = {i}txt - status arr.txt");

                var trackedFeatures3D = result.TrackedFeatures3D;
                CollectionAssert.AreEqual(trackedFeatures3D.ToArray(), points3DCplusPlus.ToArray(), point3DComparer);

                //var trackedFeatures3Dm = Utils.Get3dPointsMat(trackedFeatures3D);

                // Same 3D points materialized twice: as an Emgu matrix (for CvInvoke.PCACompute)
                // and as a plain 2D array (for Accord's PrincipalComponentAnalysis).
                var tf3D = new double[trackedFeatures3D.Size, 3];
                var trackedFeatures3Dm = new Matrix<double>(trackedFeatures3D.Size, 3);
                for (int k = 0; k < trackedFeatures3D.Size; k++)
                {
                    trackedFeatures3Dm[k, 0] = trackedFeatures3D[k].X;
                    trackedFeatures3Dm[k, 1] = trackedFeatures3D[k].Y;
                    trackedFeatures3Dm[k, 2] = trackedFeatures3D[k].Z;

                    tf3D[k, 0] = trackedFeatures3D[k].X;
                    tf3D[k, 1] = trackedFeatures3D[k].Y;
                    tf3D[k, 2] = trackedFeatures3D[k].Z;
                }

                // PCA on the triangulated cloud; eigenvectors compared to the C++ dump.
                var eigenvectors = new Mat();
                var mean = new Mat();
                CvInvoke.PCACompute(trackedFeatures3Dm, mean, eigenvectors);
                var eigenvectorsMatrix = new Matrix<double>(eigenvectors.Rows, eigenvectors.Cols, eigenvectors.DataPointer);
                CollectionAssert.AreEqual(eigenvectorsMatrix.Data, eigenvectorsCplusPlus.Data, matrixComparer);

                // Cross-check OpenCV's PCA mean against Accord.NET's on the same data.
                var method = PrincipalComponentMethod.Center;
                var pca = new PrincipalComponentAnalysis(method);
                pca.Learn(tf3D.ToJagged());
                var meanMatrix = new Matrix<double>(mean.Rows, mean.Cols, mean.DataPointer);
                CollectionAssert.AreEqual(meanMatrix.Data.ToJagged()[0], pca.Means, matrixComparer);

                int numInliers = 0;
                // Smallest-variance principal axis (row 2) is the candidate plane normal.
                var normalOfPlane = eigenvectorsMatrix.GetRow(2).ToUMat().ToMat(AccessType.Fast);
                CvInvoke.Normalize(normalOfPlane, normalOfPlane);
                var normalOfPlaneMatrix = new Matrix<double>(normalOfPlane.Rows, normalOfPlane.Cols, normalOfPlane.DataPointer);
                var normalOfPlaneArray = new[] { normalOfPlaneMatrix[0, 0], normalOfPlaneMatrix[0, 1], normalOfPlaneMatrix[0, 2] };
                CollectionAssert.AreEqual(normalOfPlaneArray, normalOfPlaneCplusPlus, matrixComparer);

                // Distance threshold from the variance along the normal axis.
                double pToPlaneThresh = Math.Sqrt(pca.Eigenvalues.ElementAt(2));
                Assert.AreEqual(pToPlaneTrashCplusPlus, pToPlaneThresh, 0.0001);

                // Mark every point whose distance to the fitted plane is below the threshold.
                var statusArray = new byte[trackedFeatures3D.Size];
                for (int k = 0; k < trackedFeatures3D.Size; k++)
                {
                    var t1 = new double[] { trackedFeatures3D[k].X, trackedFeatures3D[k].Y, trackedFeatures3D[k].Z };
                    var t2 = t1.Subtract(pca.Means);
                    var w = new Matrix<double>(new[, ] { { t2[0], t2[1], t2[2] } });
                    double d = Math.Abs(normalOfPlane.Dot(w));
                    if (d < pToPlaneThresh)
                    {
                        numInliers++;
                        statusArray[k] = 1;
                    }
                }
                Assert.AreEqual(numInliersCplusPlus, numInliers);

                var statusVector = new VectorOfByte(statusArray);
                CollectionAssert.AreEqual(statusArrCplusPlus.ToArray(), statusVector.ToArray());

                // Need at least 75% coplanar points to finish bootstrapping.
                bool bootstrapping = numInliers / (double)trackedFeatures3D.Size < 0.75;
                if (!bootstrapping)
                {
                    //enough features are coplanar, keep them and flatten them on the XY plane
                    Utils.KeepVectorsByStatus(ref trackedFeatures, ref trackedFeatures3D, statusVector);

                    //the PCA has the major axes of the plane
                    var projected = new Mat();
                    CvInvoke.PCAProject(trackedFeatures3Dm, mean, eigenvectors, projected);
                    var projectedMatrix = new Matrix<double>(projected.Rows, projected.Cols, projected.DataPointer);
                    // Zero the third component: project onto the plane's XY axes.
                    projectedMatrix.GetCol(2).SetValue(0);
                    projectedMatrix.CopyTo(trackedFeatures3Dm);
                }
                else
                {
                    // Not enough coplanar points: roll back and keep bootstrapping.
                    bootstrapKp = bootstrapKpOrig;
                    trackedFeatures = trackedFeaturesOrig;
                }
            }
        }

        // Advance to the next frame (prevGray was already updated via CopyTo above).
        currentGray = capture.QueryFrame();
        CvInvoke.CvtColor(currentGray, currentGray, ColorConversion.Bgr2Gray);
    }
}