// Required namespaces: Emgu.CV, Emgu.CV.Util, Emgu.CV.Structure, System.Drawing.

/// <summary>
/// Triangulates matched feature pairs from two views and validates the result by
/// reprojection. The triangulation is accepted only if at least 75% of the points
/// lie in front of the camera and the mean reprojection error is below 5 pixels;
/// individual points are then kept only if their own reprojection error is below 20 pixels.
/// </summary>
/// <param name="calibrationInfo">Camera intrinsics and distortion coefficients.</param>
/// <param name="trackedFeaturesKp">Keypoints tracked in the current frame.</param>
/// <param name="bootstrapKp">Corresponding keypoints from the bootstrap (reference) frame.</param>
/// <param name="p">3x4 projection matrix of the bootstrap view.</param>
/// <param name="p1">3x4 projection matrix of the current view.</param>
/// <returns>The triangulated 3D points, the filtered keypoints, and the reprojection error.</returns>
public static TriangulateAndCheckReprojResult TriangulateAndCheckReproj(
    CameraCalibrationInfo calibrationInfo,
    VectorOfKeyPoint trackedFeaturesKp,
    VectorOfKeyPoint bootstrapKp,
    Matrix<double> p,
    Matrix<double> p1)
{
    var result = new TriangulateAndCheckReprojResult();

    var trackedFeaturesPoints = Utils.GetPointsVector(trackedFeaturesKp);
    var bootstrapPoints = Utils.GetPointsVector(bootstrapKp);

    // Undistort both point sets into normalized camera coordinates.
    var normalizedTrackedPts = new VectorOfPointF();
    var normalizedBootstrapPts = new VectorOfPointF();
    CvInvoke.UndistortPoints(trackedFeaturesPoints, normalizedTrackedPts, calibrationInfo.Intrinsic, calibrationInfo.Distortion);
    CvInvoke.UndistortPoints(bootstrapPoints, normalizedBootstrapPts, calibrationInfo.Intrinsic, calibrationInfo.Distortion);

    // Triangulate; pt3Dh receives the points as a 4xN homogeneous matrix.
    var pt3Dh = new Mat();
    CvInvoke.TriangulatePoints(p, p1, normalizedBootstrapPts, normalizedTrackedPts, pt3Dh);
    var pt3DhMatrix = new Matrix<float>(pt3Dh.Rows, pt3Dh.Cols, pt3Dh.DataPointer);

    // Convert to Euclidean coordinates; pt3D ends up as an Nx3 matrix.
    var pt3DMat = new Mat();
    CvInvoke.ConvertPointsFromHomogeneous(pt3DhMatrix.Transpose(), pt3DMat);
    var pt3D = new Matrix<float>(pt3DMat.Rows, pt3DMat.Cols * pt3DMat.NumberOfChannels, pt3DMat.DataPointer);

    // Cheirality check: flag the points that lie in front of the camera (positive Z).
    var statusArray = new byte[pt3D.Rows];
    for (int i = 0; i < pt3D.Rows; i++)
    {
        statusArray[i] = (pt3D[i, 2] > 0) ? (byte)1 : (byte)0;
    }

    var status = new VectorOfByte(statusArray);
    int count = CvInvoke.CountNonZero(status);
    double percentage = count / (double)pt3D.Rows;

    if (percentage > 0.75)
    {
        // Reproject the 3D points with an identity pose and compare them
        // against the original bootstrap points.
        var rvec = new VectorOfFloat(new float[] { 0, 0, 0 });
        var tvec = new VectorOfFloat(new float[] { 0, 0, 0 });
        var reprojectedPtSet1 = new VectorOfPointF();
        CvInvoke.ProjectPoints(pt3D, rvec, tvec, calibrationInfo.Intrinsic, calibrationInfo.Distortion, reprojectedPtSet1);

        double reprojErr = CvInvoke.Norm(reprojectedPtSet1, bootstrapPoints) / bootstrapPoints.Size;

        if (reprojErr < 5)
        {
            // Keep only the correspondences whose individual reprojection
            // error stays below 20 pixels.
            statusArray = new byte[bootstrapPoints.Size];
            for (int i = 0; i < bootstrapPoints.Size; ++i)
            {
                var pointsDiff = Utils.SubstarctPoints(bootstrapPoints[i], reprojectedPtSet1[i]);
                var vectorOfPoint = new VectorOfPointF(new[] { pointsDiff });
                statusArray[i] = CvInvoke.Norm(vectorOfPoint) < 20.0 ? (byte)1 : (byte)0;
            }

            status.Dispose();
            status = new VectorOfByte(statusArray);

            var trackedFeatures3D = new VectorOfPoint3D32F(Utils.Get3dPointsArray(pt3D));
            Utils.KeepVectorsByStatus(ref trackedFeaturesKp, ref trackedFeatures3D, status);

            result.Error = reprojErr;
            result.TrackedFeatures3D = new VectorOfPoint3D32F(trackedFeatures3D.ToArray());
            result.FilteredTrackedFeaturesKp = new VectorOfKeyPoint(trackedFeaturesKp.ToArray());
            result.FilteredBootstrapKp = new VectorOfKeyPoint(bootstrapKp.ToArray());
            result.Result = true;
        }

        rvec.Dispose();
        tvec.Dispose();
        reprojectedPtSet1.Dispose();
    }

    normalizedTrackedPts.Dispose();
    normalizedBootstrapPts.Dispose();
    pt3Dh.Dispose();
    pt3DhMatrix.Dispose();
    pt3DMat.Dispose();
    pt3D.Dispose();
    status.Dispose();

    return result;
}
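// Illustrative usage sketch (not from the original source): builds P = [I|0] and
// P1 = [R|t] from a recovered relative pose and runs the triangulation check.
// The wrapper name and the rotation/translation inputs are assumptions; in a
// typical pipeline R and t would come from decomposing the essential matrix.
public static TriangulateAndCheckReprojResult TriangulateFromPose(
    CameraCalibrationInfo calibrationInfo,
    VectorOfKeyPoint trackedFeaturesKp,
    VectorOfKeyPoint bootstrapKp,
    Matrix<double> rotation,    // 3x3 rotation of the current view
    Matrix<double> translation) // 3x1 translation of the current view
{
    // Reference (bootstrap) camera: P = [I | 0].
    var p = new Matrix<double>(3, 4);
    p[0, 0] = p[1, 1] = p[2, 2] = 1.0;

    // Current camera: P1 = [R | t].
    var p1 = new Matrix<double>(3, 4);
    for (int row = 0; row < 3; row++)
    {
        for (int col = 0; col < 3; col++)
        {
            p1[row, col] = rotation[row, col];
        }
        p1[row, 3] = translation[row, 0];
    }

    return TriangulateAndCheckReproj(calibrationInfo, trackedFeaturesKp, bootstrapKp, p, p1);
}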
/// <summary>
/// Manually projects 3D points into the image: transforms them into camera space,
/// applies the pinhole model with radial/tangential lens distortion, and returns
/// the 2D projections of the points that are visible (in front of the camera and
/// inside the picture bounds). The visible source points are returned via <paramref name="visible"/>.
/// </summary>
public static PointF[] ProjectPointd2D_Manually<T>(this PinholeCamera cam, T[] points3d, out T[] visible) where T : SPoint
{
    var r = new List<PointF>();
    var vis = new List<T>();

    // World-to-camera transform: the inverse of the camera's world matrix.
    var transf = cam.WorldMat.Inverted();
    var worldmatinv = cam.worldMat.Inverted();

    var mcvpoints3d = points3d.Select(x => x.toMCvPoint3D32f());
    var points3dvec = new VectorOfPoint3D32F(mcvpoints3d.ToArray());

    // Batch-transform all points into camera coordinates via homogeneous coordinates.
    var phm_mat = new Mat();
    var camcoords_mat = new Mat();
    var camcoords_mat2 = new VectorOfPoint3D32F();
    CvInvoke.ConvertPointsToHomogeneous(points3dvec, phm_mat);
    CvInvoke.Transform(phm_mat, camcoords_mat, transf);
    CvInvoke.ConvertPointsFromHomogeneous(camcoords_mat, camcoords_mat2);

    var cc = cam.toCeresCamera();

    for (int i = 0; i < camcoords_mat2.Size; i++)
    {
        // Per-point camera-space position, recomputed with OpenTK.
        var camcoord = Vector3d.TransformPerspective(new Vector3d(points3d[i].X, points3d[i].Y, points3d[i].Z), worldmatinv);

        // Skip points behind the camera.
        if (camcoord.Z < 0)
        {
            continue;
        }

        // Normalized image coordinates.
        var x = camcoord.X / camcoord.Z;
        var y = camcoord.Y / camcoord.Z;

        // Radial (R1..R3) and tangential (T1, T2) distortion.
        var r2 = x * x + y * y;
        var r4 = r2 * r2;
        var r6 = r4 * r2;
        var r_coeff = 1 + cam.DistortionR1 * r2 + cam.DistortionR2 * r4 + cam.DistortionR3 * r6;
        var tdistx = 2 * cam.DistortionT1 * x * y + cam.DistortionT2 * (r2 + 2 * x * x);
        var tdisty = 2 * cam.DistortionT2 * x * y + cam.DistortionT1 * (r2 + 2 * y * y);
        var xd = x * r_coeff + tdistx;
        var yd = y * r_coeff + tdisty;

        // Pixel coordinates from the manual model. These are kept for reference;
        // the projection that is actually returned comes from the ceres camera below.
        var im_x2 = cam.Intrinsics.fx * xd + cam.Intrinsics.cx;
        var im_y2 = cam.Intrinsics.fy * yd + cam.Intrinsics.cy;

        var pointf = ceresdotnet.CeresTestFunctions.ProjectPoint(cc.Internal, cc.External, points3d[i].Pos);
        var im_x = pointf.X;
        var im_y = pointf.Y;

        // Keep the point only if its projection falls inside the image.
        if (im_x >= 0 && im_x <= cam.PictureSize.Width && im_y >= 0 && im_y <= cam.PictureSize.Height)
        {
            vis.Add(points3d[i]);
            r.Add(pointf);
        }
    }

    visible = vis.ToArray();
    return r.ToArray();
}
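// The distortion math above follows the Brown-Conrady model that OpenCV's
// ProjectPoints also uses (three radial and two tangential coefficients).
// The standalone helper below restates it for a single normalized point so the
// model is easy to verify in isolation; the helper's name and parameter order
// are illustrative, not part of the original API.
public static PointF DistortNormalized(
    double x, double y,              // normalized coordinates (X/Z, Y/Z)
    double k1, double k2, double k3, // radial coefficients (DistortionR1..R3)
    double p1, double p2,            // tangential coefficients (DistortionT1/T2)
    double fx, double fy, double cx, double cy)
{
    double r2 = x * x + y * y;
    double r4 = r2 * r2;
    double r6 = r4 * r2;

    // Radial scaling followed by tangential offsets.
    double radial = 1 + k1 * r2 + k2 * r4 + k3 * r6;
    double xd = x * radial + 2 * p1 * x * y + p2 * (r2 + 2 * x * x);
    double yd = y * radial + p1 * (r2 + 2 * y * y) + 2 * p2 * x * y;

    // Scale into pixel coordinates with the intrinsics.
    return new PointF((float)(fx * xd + cx), (float)(fy * yd + cy));
}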