public void Triangulate_And_Check_Reproj()
{
    Matrix<double> p;
    Matrix<double> p1;
    TriangulateAndCheckReprojResult result;

    #region Case 1

    p = new Matrix<double>(new double[3, 4]
    {
        { 1, 0, 0, 0 },
        { 0, 1, 0, 0 },
        { 0, 0, 1, 0 }
    });
    p1 = new Matrix<double>(new double[3, 4]
    {
        { -0.9824809681811199, -0.00737816829847441, 0.1862168354216374, 0.01333896289977746 },
        { 0.01799441939100593, -0.9983029386588546, 0.05538450627997073, 0.03436414273108072 },
        { 0.1854921778207113, 0.05776508718642179, 0.9809464035663084, 0.9993203579248829 }
    });

    result = OpenCvUtilities.TriangulateAndCheckReproj(_calibration,
        Utils.GetKeyPointsVector(_trackedFeatures1), Utils.GetKeyPointsVector(_bootstrapKp), p, p1);

    Assert.That(result.Result, Is.EqualTo(false));
    Assert.That(result.Error, Is.EqualTo(0));

    #endregion

    #region Case 2

    p = new Matrix<double>(new double[3, 4]
    {
        { 1, 0, 0, 0 },
        { 0, 1, 0, 0 },
        { 0, 0, 1, 0 }
    });
    p1 = new Matrix<double>(new double[3, 4]
    {
        { 0.9870124281645626, 0.03547270126454876, -0.1566785055892807, -0.0008631055249466995 },
        { -0.03193350943559628, 0.9991760497758493, 0.0250494017784099, -0.02589733540347029 },
        { 0.1574379802456221, -0.01972077633678765, 0.9873319468936744, -0.9996642351649142 }
    });

    result = OpenCvUtilities.TriangulateAndCheckReproj(_calibration,
        Utils.GetKeyPointsVector(_trackedFeatures2), Utils.GetKeyPointsVector(_bootstrapKp), p, p1);

    Assert.That(result.Result, Is.EqualTo(true));
    Assert.AreEqual(2.90235f, result.Error, 0.01);

    #endregion
}
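// For context: TriangulateAndCheckReproj is exercised above as a black box. The
// sketch below shows the usual shape of such a check, assuming Emgu CV: fold the
// intrinsics K into both projection matrices, triangulate the pixel-space
// correspondences, reproject into the second view, and return the mean pixel
// error. The class and method names are illustrative, not the repo's actual
// implementation.
using System;
using System.Drawing;
using Emgu.CV;
using Emgu.CV.CvEnum;
using Emgu.CV.Util;

public static class ReprojectionSketch
{
    public static double MeanReprojectionError(Matrix<double> k, PointF[] pts1, PointF[] pts2,
        Matrix<double> p, Matrix<double> p1)
    {
        // Pixel-space projection matrices: K * [R | t].
        Matrix<double> kp = k * p;
        Matrix<double> kp1 = k * p1;

        // Triangulate; the output is a 4xN matrix of homogeneous points.
        var pts4D = new Mat();
        using (var v1 = new VectorOfPointF(pts1))
        using (var v2 = new VectorOfPointF(pts2))
            CvInvoke.TriangulatePoints(kp, kp1, v1, v2, pts4D);
        pts4D.ConvertTo(pts4D, DepthType.Cv64F);
        var x = new Matrix<double>(pts4D.Rows, pts4D.Cols, pts4D.DataPointer);

        // Reproject every point into the second view and accumulate the error.
        double sum = 0;
        for (int j = 0; j < pts1.Length; j++)
        {
            double px = 0, py = 0, pw = 0;
            for (int r = 0; r < 4; r++)
            {
                px += kp1[0, r] * x[r, j];
                py += kp1[1, r] * x[r, j];
                pw += kp1[2, r] * x[r, j];
            }
            double dx = px / pw - pts2[j].X, dy = py / pw - pts2[j].Y;
            sum += Math.Sqrt(dx * dx + dy * dy);
        }
        return sum / pts1.Length;
    }
}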
public void Camera_Pose_And_Triangulation_From_Fundamental()
{
    Matrix<double> p;
    Matrix<double> p1;
    Matrix<double> e;
    CameraPoseAndTriangulationFromFundamentalResult result;

    #region Case 1

    p = new Matrix<double>(new double[3, 4]
    {
        { 1, 0, 0, 0 },
        { 0, 1, 0, 0 },
        { 0, 0, 1, 0 }
    });
    p1 = new Matrix<double>(new double[3, 4]
    {
        { 0.9870124281645626, 0.03547270126454876, -0.1566785055892807, -0.0008631055249466995 },
        { -0.03193350943559628, 0.9991760497758493, 0.0250494017784099, -0.02589733540347029 },
        { 0.1574379802456221, -0.01972077633678765, 0.9873319468936744, -0.9996642351649142 }
    });
    e = new Matrix<double>(new double[3, 3]
    {
        { -0.4052652752518787, 12.45233689948151, -0.01348616168894345 },
        { -11.47229370932243, -0.4564241889679639, 1.831549257782737 },
        { 0.297551532077682, 0.001072849756781113, -0.04743653296950803 }
    });

    var p1Init = new Matrix<double>(3, 4);
    p1Init.SetIdentity();

    result = OpenCvUtilities.CameraPoseAndTriangulationFromFundamental(_calibration,
        Utils.GetKeyPointsVector(_trackedFeatures2), Utils.GetKeyPointsVector(_bootstrapKp), p1Init);

    Assert.AreEqual(196.532, result.Min, 0.1);
    Assert.AreEqual(428.096, result.Max, 0.1);
    CollectionAssert.AreEqual(e.Data, result.Esential.Data, _matrixComparer);
    CollectionAssert.AreEqual(p.Data, result.P1.Data, _matrixComparer);
    CollectionAssert.AreEqual(p1.Data, result.P2.Data, _matrixComparer);
    Assert.That(result.Result, Is.EqualTo(true));

    #endregion
}
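// For context: a minimal sketch of the conventional first step behind
// CameraPoseAndTriangulationFromFundamental, assuming Emgu CV. The fundamental
// matrix is estimated from the correspondences with RANSAC and lifted to the
// essential matrix via E = K^T * F * K. The RANSAC threshold and the class and
// method names are illustrative. DecomposeEtoRandT (tested below) then yields
// the four candidate (R, t) pairs, and the reprojection check above picks the
// one that puts points in front of both cameras.
using Emgu.CV;
using Emgu.CV.CvEnum;
using Emgu.CV.Util;

public static class EssentialSketch
{
    public static Matrix<double> EssentialFromMatches(Matrix<double> k, VectorOfPointF pts1, VectorOfPointF pts2)
    {
        // F from RANSAC over the matches; "mask" flags the surviving inliers.
        var mask = new Mat();
        Mat f = CvInvoke.FindFundamentalMat(pts1, pts2, FmType.Ransac, 3.0, 0.99, mask);
        var fm = new Matrix<double>(3, 3, f.DataPointer);

        // E = K^T * F * K
        return k.Transpose() * fm * k;
    }
}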
// Update is called once per frame
void Update()
{
    // Grab the current camera frame.
    using (Mat camImage = capture.RetrieveMat())
    {
        Texture2D tex;

        // Flip the image on the Y axis so it behaves like a mirror.
        Cv2.Flip(camImage, camImage, FlipMode.Y);

        if (detectFaces)
        {
            // Run face detection and convert the annotated frame to a Unity Texture2D.
            Mat haarResult = DetectFace(camImage);
            tex = OpenCvUtilities.MatToTexture2D(haarResult);
        }
        else
        {
            tex = OpenCvUtilities.MatToTexture2D(camImage);
        }

        // For testing: save the image
        //File.WriteAllBytes(Application.dataPath + "/../SavedScreen.png", tex.EncodeToPNG());

        // Destroy the old texture if it exists; Unity won't clean it up automatically.
        if (backgroundMaterial.mainTexture != null)
        {
            Texture2D.DestroyImmediate(backgroundMaterial.mainTexture, true);
        }
        backgroundMaterial.mainTexture = tex;
    }
}
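// For context: MatToTexture2D is not shown above. Below is a minimal sketch of
// what such a helper can look like with OpenCvSharp; the encode-to-PNG route is
// slow but makes no assumptions about the Mat's stride or channel order. The
// class name is illustrative, not the project's actual implementation.
using OpenCvSharp;
using UnityEngine;

public static class TextureConversionSketch
{
    public static Texture2D MatToTexture2D(Mat mat)
    {
        // Encode the Mat to PNG bytes, then let Unity decode them.
        Cv2.ImEncode(".png", mat, out byte[] png);

        // LoadImage resizes the texture to the decoded image's dimensions.
        var tex = new Texture2D(2, 2, TextureFormat.RGBA32, false);
        tex.LoadImage(png);
        return tex;
    }
}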
public void Bootstrap_Track_Logic_Test()
{
    var capture = new Capture($@"{TestCaseProjectPath}\Videos\cube2.avi");
    //var capture = new Capture(@"C:\Users\zakharov\Documents\Repos\Mine\Rc\src\RubiksCube.OpenCV.TestCase\Videos\cube2.avi");

    // Skip the first 40 frames, then take two consecutive grayscale frames.
    for (int i = 0; i < 40; i++)
    {
        capture.QueryFrame();
    }

    var prevGray = capture.QueryFrame();
    CvInvoke.CvtColor(prevGray, prevGray, ColorConversion.Bgr2Gray);

    var currentGray = capture.QueryFrame();
    CvInvoke.CvtColor(currentGray, currentGray, ColorConversion.Bgr2Gray);

    var bootstrapKp = new VectorOfKeyPoint();
    new ORBDetector().DetectRaw(prevGray, bootstrapKp);

    var trackedFeatures = new VectorOfKeyPoint(bootstrapKp.ToArray());

    //-------------------------------------------------------------------------

    var pointComparer = Comparer<PointF>.Create((p1, p2) =>
        Math.Abs(p1.X - p2.X) < 0.0001f && Math.Abs(p1.Y - p2.Y) < 0.0001f ? 0 : 1);
    var point3DComparer = Comparer<MCvPoint3D32f>.Create((p1, p2) =>
        Math.Abs(p1.X - p2.X) < 0.0001f && Math.Abs(p1.Y - p2.Y) < 0.0001f && Math.Abs(p1.Z - p2.Z) < 0.0001f ? 0 : 1);
    var matrixComparer = Comparer<double>.Create((x, y) => Math.Abs(x - y) < 0.0001f ? 0 : 1);

    for (int i = 41; i <= 95; i++)
    {
        // Reference data dumped by the C++ version for the same frame.
        var bootstrapPointsBeforeOpticalFlowCplusPlus = GetPoints($"I = {i}txt - Bootstrap points before optical flow.txt");
        var trackedPointsBeforeOpticalFlowCplusPlus = GetPoints($"I = {i}txt - Tracked points before optical flow.txt");
        var bootstrapPointsAfterOpticalFlowCplusPlus = GetPoints($"I = {i}txt - Bootstrap points after optical flow.txt");
        var trackedPointsAfterOpticalFlowCplusPlus = GetPoints($"I = {i}txt - Tracked points after optical flow.txt");
        var bootstrapPointsAfterHomographyCplusPlus = GetPoints($"I = {i}txt - Bootstrap points after homography.txt");
        var trackedPointsAfterHomographyCplusPlus = GetPoints($"I = {i}txt - Tracked points after homography.txt");
        var homographyCplusPlus = Getmatrix3X3($"I = {i}txt - Homography.txt");
        var homographyMaskCplusPlus = GetByteVector($"I = {i}txt - Homography mask.txt");

        var corners = new VectorOfPointF();
        var status = new VectorOfByte();
        var errors = new VectorOfFloat();

        CollectionAssert.AreEqual(Utils.GetPointsVector(bootstrapKp).ToArray(), bootstrapPointsBeforeOpticalFlowCplusPlus.ToArray(), pointComparer);
        CollectionAssert.AreEqual(Utils.GetPointsVector(trackedFeatures).ToArray(), trackedPointsBeforeOpticalFlowCplusPlus.ToArray(), pointComparer);

        CvInvoke.CalcOpticalFlowPyrLK(prevGray, currentGray, Utils.GetPointsVector(trackedFeatures), corners,
            status, errors, new Size(11, 11), 3, new MCvTermCriteria(30, 0.01));
        currentGray.CopyTo(prevGray);

        if (CvInvoke.CountNonZero(status) < status.Size * 0.8)
        {
            throw new Exception("Tracking failed.");
        }

        trackedFeatures = Utils.GetKeyPointsVector(corners);
        Utils.KeepVectorsByStatus(ref trackedFeatures, ref bootstrapKp, status);

        CollectionAssert.AreEqual(Utils.GetPointsVector(bootstrapKp).ToArray(), bootstrapPointsAfterOpticalFlowCplusPlus.ToArray(), pointComparer);
        CollectionAssert.AreEqual(Utils.GetPointsVector(trackedFeatures).ToArray(), trackedPointsAfterOpticalFlowCplusPlus.ToArray(), pointComparer);

        if (trackedFeatures.Size != bootstrapKp.Size)
        {
            const string error = "Tracked features vector size is not equal to bootstrapped one.";
            throw new Exception(error);
        }

        // Verify the surviving features with a homography.
        var inlierMask = new VectorOfByte();
        var homography = new Mat();
        if (trackedFeatures.Size > 4)
        {
            CvInvoke.FindHomography(Utils.GetPointsVector(trackedFeatures), Utils.GetPointsVector(bootstrapKp),
                homography, HomographyMethod.Ransac, 0.99, inlierMask);
        }

        var homographyMatrix = new Matrix<double>(homography.Rows, homography.Cols, homography.DataPointer);
        CollectionAssert.AreEqual(homographyMatrix.Data, homographyCplusPlus.Data, matrixComparer);

        int inliersNum = CvInvoke.CountNonZero(inlierMask);
        CollectionAssert.AreEqual(inlierMask.ToArray(), homographyMaskCplusPlus.ToArray());

        if (inliersNum != trackedFeatures.Size && inliersNum >= 4 && !homography.IsEmpty)
        {
            Utils.KeepVectorsByStatus(ref trackedFeatures, ref bootstrapKp, inlierMask);
        }
        else if (inliersNum < 10)
        {
            throw new Exception("Not enough features survived homography.");
        }

        CollectionAssert.AreEqual(Utils.GetPointsVector(bootstrapKp).ToArray(), bootstrapPointsAfterHomographyCplusPlus.ToArray(), pointComparer);
        CollectionAssert.AreEqual(Utils.GetPointsVector(trackedFeatures).ToArray(), trackedPointsAfterHomographyCplusPlus.ToArray(), pointComparer);

        var bootstrapKpOrig = new VectorOfKeyPoint(bootstrapKp.ToArray());
        var trackedFeaturesOrig = new VectorOfKeyPoint(trackedFeatures.ToArray());

        //TODO: Compare all these to c++ version
        // Attempt 3D reconstruction (triangulation) if the conditions are right.
        var rigidT = CvInvoke.EstimateRigidTransform(Utils.GetPointsVector(trackedFeatures).ToArray(),
            Utils.GetPointsVector(bootstrapKp).ToArray(), false);
        var matrix = new Matrix<double>(rigidT.Rows, rigidT.Cols, rigidT.DataPointer);

        if (CvInvoke.Norm(matrix.GetCol(2)) > 100)
        {
            var points3DCplusPlus = GetPoints3d($"I = {i}txt - 3d points.txt");
            var eigenvectorsCplusPlus = Getmatrix3X3($"I = {i}txt - eigenvectors.txt");
            var normalOfPlaneCplusPlus = GetDoubleArray($"I = {i}txt - normal of plane.txt");

            // Camera motion is sufficient.
            var p1Init = new Matrix<double>(3, 4);
            p1Init.SetIdentity();
            var result = OpenCvUtilities.CameraPoseAndTriangulationFromFundamental(_calibration, trackedFeatures, bootstrapKp, p1Init);
            trackedFeatures = result.FilteredTrackedFeaturesKp;
            bootstrapKp = result.FilteredBootstrapKp;

            if (result.Result)
            {
                double pToPlaneThreshCplusPlus = GetDouble($"I = {i}txt - p_to_plane_thresh.txt");
                int numInliersCplusPlus = GetInt($"I = {i}txt - num inliers.txt");
                var statusArrCplusPlus = GetByteVector($"I = {i}txt - status arr.txt");

                var trackedFeatures3D = result.TrackedFeatures3D;
                CollectionAssert.AreEqual(trackedFeatures3D.ToArray(), points3DCplusPlus.ToArray(), point3DComparer);

                //var trackedFeatures3Dm = Utils.Get3dPointsMat(trackedFeatures3D);
                var tf3D = new double[trackedFeatures3D.Size, 3];
                var trackedFeatures3Dm = new Matrix<double>(trackedFeatures3D.Size, 3);
                for (int k = 0; k < trackedFeatures3D.Size; k++)
                {
                    trackedFeatures3Dm[k, 0] = trackedFeatures3D[k].X;
                    trackedFeatures3Dm[k, 1] = trackedFeatures3D[k].Y;
                    trackedFeatures3Dm[k, 2] = trackedFeatures3D[k].Z;

                    tf3D[k, 0] = trackedFeatures3D[k].X;
                    tf3D[k, 1] = trackedFeatures3D[k].Y;
                    tf3D[k, 2] = trackedFeatures3D[k].Z;
                }

                var eigenvectors = new Mat();
                var mean = new Mat();
                CvInvoke.PCACompute(trackedFeatures3Dm, mean, eigenvectors);
                var eigenvectorsMatrix = new Matrix<double>(eigenvectors.Rows, eigenvectors.Cols, eigenvectors.DataPointer);
                CollectionAssert.AreEqual(eigenvectorsMatrix.Data, eigenvectorsCplusPlus.Data, matrixComparer);

                var method = PrincipalComponentMethod.Center;
                var pca = new PrincipalComponentAnalysis(method);
                pca.Learn(tf3D.ToJagged());

                var meanMatrix = new Matrix<double>(mean.Rows, mean.Cols, mean.DataPointer);
                CollectionAssert.AreEqual(meanMatrix.Data.ToJagged()[0], pca.Means, matrixComparer);

                int numInliers = 0;
                var normalOfPlane = eigenvectorsMatrix.GetRow(2).ToUMat().ToMat(AccessType.Fast);
                CvInvoke.Normalize(normalOfPlane, normalOfPlane);
                var normalOfPlaneMatrix = new Matrix<double>(normalOfPlane.Rows, normalOfPlane.Cols, normalOfPlane.DataPointer);
                var normalOfPlaneArray = new[] { normalOfPlaneMatrix[0, 0], normalOfPlaneMatrix[0, 1], normalOfPlaneMatrix[0, 2] };
                CollectionAssert.AreEqual(normalOfPlaneArray, normalOfPlaneCplusPlus, matrixComparer);

                double pToPlaneThresh = Math.Sqrt(pca.Eigenvalues.ElementAt(2));
                Assert.AreEqual(pToPlaneThreshCplusPlus, pToPlaneThresh, 0.0001);

                var statusArray = new byte[trackedFeatures3D.Size];
                for (int k = 0; k < trackedFeatures3D.Size; k++)
                {
                    var t1 = new double[] { trackedFeatures3D[k].X, trackedFeatures3D[k].Y, trackedFeatures3D[k].Z };
                    var t2 = t1.Subtract(pca.Means);
                    var w = new Matrix<double>(new[,] { { t2[0], t2[1], t2[2] } });
                    double d = Math.Abs(normalOfPlane.Dot(w));
                    if (d < pToPlaneThresh)
                    {
                        numInliers++;
                        statusArray[k] = 1;
                    }
                }

                Assert.AreEqual(numInliersCplusPlus, numInliers);

                var statusVector = new VectorOfByte(statusArray);
                CollectionAssert.AreEqual(statusArrCplusPlus.ToArray(), statusVector.ToArray());

                bool bootstrapping = numInliers / (double)trackedFeatures3D.Size < 0.75;
                if (!bootstrapping)
                {
                    // Enough features are coplanar: keep them and flatten them onto the XY plane.
                    Utils.KeepVectorsByStatus(ref trackedFeatures, ref trackedFeatures3D, statusVector);

                    // The PCA has the major axes of the plane.
                    var projected = new Mat();
                    CvInvoke.PCAProject(trackedFeatures3Dm, mean, eigenvectors, projected);
                    var projectedMatrix = new Matrix<double>(projected.Rows, projected.Cols, projected.DataPointer);
                    projectedMatrix.GetCol(2).SetValue(0);
                    projectedMatrix.CopyTo(trackedFeatures3Dm);
                }
                else
                {
                    bootstrapKp = bootstrapKpOrig;
                    trackedFeatures = trackedFeaturesOrig;
                }
            }
        }

        currentGray = capture.QueryFrame();
        CvInvoke.CvtColor(currentGray, currentGray, ColorConversion.Bgr2Gray);
    }
}
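// For context: the test leans on Utils.KeepVectorsByStatus to keep both vectors
// index-aligned by dropping every entry whose status byte is zero. Below is a
// minimal sketch of the keypoint overload, assuming Emgu CV vectors; the class
// name and body are illustrative, not the repo's actual helper.
using System.Collections.Generic;
using Emgu.CV.Structure;
using Emgu.CV.Util;

public static class VectorFilterSketch
{
    public static void KeepVectorsByStatus(ref VectorOfKeyPoint f1, ref VectorOfKeyPoint f2, VectorOfByte status)
    {
        // Keep the i-th keypoint of both vectors only when status[i] != 0,
        // so correspondences stay paired after filtering.
        var keep1 = new List<MKeyPoint>();
        var keep2 = new List<MKeyPoint>();
        for (int i = 0; i < status.Size; i++)
        {
            if (status[i] != 0)
            {
                keep1.Add(f1[i]);
                keep2.Add(f2[i]);
            }
        }
        f1 = new VectorOfKeyPoint(keep1.ToArray());
        f2 = new VectorOfKeyPoint(keep2.ToArray());
    }
}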
public void Decompose_Eto_Rand()
{
    Matrix<double> e;
    Matrix<double> r1;
    Matrix<double> r2;
    Matrix<double> t1;
    Matrix<double> t2;
    Matrix<double> r1Expected;
    Matrix<double> r2Expected;
    Matrix<double> t1Expected;
    Matrix<double> t2Expected;

    #region Case 1

    e = new Matrix<double>(new[,]
    {
        { -0.0793904, -15.101, 0.371601 },
        { 14.1124, 0.375567, -2.48662 },
        { -0.48423, 0.188653, 0.0805487 }
    });
    r1Expected = new Matrix<double>(new[,]
    {
        { -0.982481, -0.00737817, 0.186217 },
        { 0.0179944, -0.998303, 0.0553845 },
        { 0.185492, 0.0577651, 0.980946 }
    });
    r2Expected = new Matrix<double>(new[,]
    {
        { 0.987093, 0.00800034, -0.159948 },
        { -0.00611273, 0.999906, 0.01229 },
        { 0.160031, -0.0111536, 0.987049 }
    });
    t1Expected = new Matrix<double>(new[] { 0.013339, 0.0343641, 0.99932 });
    t2Expected = new Matrix<double>(new[] { -0.013339, -0.0343641, -0.99932 });

    OpenCvUtilities.DecomposeEtoRandT(e, out r1, out r2, out t1, out t2);

    CollectionAssert.AreEqual(r1Expected.Data, r1.Data, _matrixComparer);
    CollectionAssert.AreEqual(r2Expected.Data, r2.Data, _matrixComparer);
    CollectionAssert.AreEqual(t1Expected.Data, t1.Data, _matrixComparer);
    CollectionAssert.AreEqual(t2Expected.Data, t2.Data, _matrixComparer);

    #endregion

    #region Case 2

    e = new Matrix<double>(new[,]
    {
        { 0.4052652752518787, -12.45233689948151, 0.01348616168894345 },
        { 11.47229370932243, 0.4564241889679639, -1.831549257782737 },
        { -0.297551532077682, -0.001072849756781113, 0.04743653296950803 }
    });
    r1Expected = new Matrix<double>(new[,]
    {
        { -0.9870124281645626, -0.03547270126454876, 0.1566785055892807 },
        { 0.03193350943559628, -0.9991760497758493, -0.0250494017784099 },
        { -0.1574379802456221, 0.01972077633678765, -0.9873319468936744 }
    });
    r2Expected = new Matrix<double>(new[,]
    {
        { 0.9867407052466314, 0.03546201177308789, -0.1583831630268374 },
        { -0.04008650966451568, 0.9988553133911859, -0.02609855653312132 },
        { -0.1572763566220209, -0.0321015362747831, -0.9870327446526289 }
    });
    t1Expected = new Matrix<double>(new[] { -0.0008631055249466995, -0.02589733540347029, -0.9996642351649142 });
    t2Expected = new Matrix<double>(new[] { 0.0008631055249466995, 0.02589733540347029, 0.9996642351649142 });

    OpenCvUtilities.DecomposeEtoRandT(e, out r1, out r2, out t1, out t2);

    CollectionAssert.AreEqual(r1Expected.Data, r1.Data, _matrixComparer);
    CollectionAssert.AreEqual(r2Expected.Data, r2.Data, _matrixComparer);
    CollectionAssert.AreEqual(t1Expected.Data, t1.Data, _matrixComparer);
    CollectionAssert.AreEqual(t2Expected.Data, t2.Data, _matrixComparer);

    #endregion

    #region Case 3

    e = new Matrix<double>(new[,]
    {
        { -0.4052652752518787, 12.45233689948151, -0.01348616168894345 },
        { -11.47229370932243, -0.4564241889679639, 1.831549257782737 },
        { 0.297551532077682, 0.001072849756781113, -0.04743653296950803 }
    });
    r1Expected = new Matrix<double>(new[,]
    {
        { 0.9870124281645626, 0.03547270126454876, -0.1566785055892807 },
        { -0.03193350943559628, 0.9991760497758493, 0.0250494017784099 },
        { 0.1574379802456221, -0.01972077633678765, 0.9873319468936744 }
    });
    r2Expected = new Matrix<double>(new[,]
    {
        { -0.9867407052466314, -0.03546201177308789, 0.1583831630268374 },
        { 0.04008650966451568, -0.9988553133911859, 0.02609855653312132 },
        { 0.1572763566220209, 0.0321015362747831, 0.9870327446526289 }
    });
    t1Expected = new Matrix<double>(new[] { 0.0008631055249466995, 0.02589733540347029, 0.9996642351649142 });
    t2Expected = new Matrix<double>(new[] { -0.0008631055249466995, -0.02589733540347029, -0.9996642351649142 });

    OpenCvUtilities.DecomposeEtoRandT(e, out r1, out r2, out t1, out t2);

    CollectionAssert.AreEqual(r1Expected.Data, r1.Data, _matrixComparer);
    CollectionAssert.AreEqual(r2Expected.Data, r2.Data, _matrixComparer);
    CollectionAssert.AreEqual(t1Expected.Data, t1.Data, _matrixComparer);
    CollectionAssert.AreEqual(t2Expected.Data, t2.Data, _matrixComparer);

    #endregion
}
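// For reference: the standard realization of DecomposeEtoRandT (Hartley &
// Zisserman) takes the SVD E = U * diag(w) * Vt and forms R1 = U*W*Vt,
// R2 = U*W^T*Vt, t = +/- u3. A minimal sketch matching the signature used in
// the tests above, assuming Emgu CV; the class name is illustrative, and the
// determinant/sign fix-ups a production version needs are omitted.
using Emgu.CV;
using Emgu.CV.CvEnum;

public static class DecomposeSketch
{
    public static void DecomposeEtoRandT(Matrix<double> e,
        out Matrix<double> r1, out Matrix<double> r2,
        out Matrix<double> t1, out Matrix<double> t2)
    {
        // SVD: E = U * diag(w) * Vt.
        var w = new Matrix<double>(3, 1);
        var u = new Matrix<double>(3, 3);
        var vt = new Matrix<double>(3, 3);
        CvInvoke.SVDecomp(e, w, u, vt, SvdFlag.Default);

        // Hartley & Zisserman's W matrix.
        var wm = new Matrix<double>(new double[,] { { 0, -1, 0 }, { 1, 0, 0 }, { 0, 0, 1 } });

        r1 = u * wm * vt;              // R1 = U * W * Vt
        r2 = u * wm.Transpose() * vt;  // R2 = U * W^T * Vt
        t1 = u.GetCol(2).Clone();      // t1 = +u3 (third column of U)
        t2 = t1 * -1.0;                // t2 = -u3
    }
}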