public bool BootstrapTrack(Mat img, Action<Matrix<double>, VectorOfPoint3D32F, Point[]> onPlaneFound)
{
    ValidateImages(_prevGray, img);

    CvInvoke.CvtColor(img, _currGray, ColorConversion.Bgr2Gray);

    ComputeOpticalFlowAndValidate(_prevGray, _currGray, ref _trackedFeatures, ref _bootstrapKp, img);

    Matrix<double> homography;
    ComputeHomographyAndValidate(ref _trackedFeatures, ref _bootstrapKp, out homography);

    // Keep copies so the keypoint sets can be restored if triangulation fails.
    var bootstrapKpOrig = new VectorOfKeyPoint(_bootstrapKp.ToArray());
    var trackedFeaturesOrig = new VectorOfKeyPoint(_trackedFeatures.ToArray());

    // Attempt 3D reconstruction (triangulation) if conditions are right.
    var rigidT = CvInvoke.EstimateRigidTransform(
        Utils.GetPointsVector(_trackedFeatures).ToArray(),
        Utils.GetPointsVector(_bootstrapKp).ToArray(),
        false);
    var matrix = new Matrix<double>(rigidT.Rows, rigidT.Cols, rigidT.DataPointer);

    if (CvInvoke.Norm(matrix.GetCol(2)) > 100)
    {
        // Camera motion is sufficient (the translation column of the rigid transform is large enough).
        var result = OpenCvUtilities.CameraPoseAndTriangulationFromFundamental(
            _calibrationInfo, _trackedFeatures, _bootstrapKp, InitialP1);

        if (result.Result)
        {
            _trackedFeatures = result.FilteredTrackedFeaturesKp;
            _bootstrapKp = result.FilteredBootstrapKp;
            _trackedFeatures3D = result.TrackedFeatures3D;

            int numInliers;
            Matrix<double> trackedFeatures3Dm;
            Mat mean;
            Mat eigenvectors;
            Matrix<double> normalOfPlaneMatrix;
            var statusVector = ComputeNormalAndPlaneInliers(
                _trackedFeatures3D,
                out trackedFeatures3Dm,
                out mean,
                out eigenvectors,
                out numInliers,
                out normalOfPlaneMatrix);

            // Keep bootstrapping while fewer than 75% of the 3D points lie on the dominant plane.
            bool bootstrapping = numInliers / (double)_trackedFeatures3D.Size < 0.75;
            if (!bootstrapping)
            {
                // Enough features are coplanar: keep them and flatten them onto the XY plane.
                Utils.KeepVectorsByStatus(ref _trackedFeatures, ref _trackedFeatures3D, statusVector);

                // The PCA has the major axes of the plane; zero the third component to flatten.
                var projected = new Mat();
                CvInvoke.PCAProject(trackedFeatures3Dm, mean, eigenvectors, projected);
                var projectedMatrix = new Matrix<double>(projected.Rows, projected.Cols, projected.DataPointer);
                projectedMatrix.GetCol(2).SetValue(0);
                projectedMatrix.CopyTo(trackedFeatures3Dm);

                //InitialP1 = result.P2;

                VectorOfFloat raux;
                VectorOfFloat taux;
                ComputeRotationAndTranslation(_trackedFeatures3D, _trackedFeatures, _calibrationInfo, out raux, out taux);

                // Accept the plane only if it is viewed at a reasonable angle (roughly 65..110 degrees
                // between the camera viewing direction and the plane normal).
                double angle = ComputeAngleBetweenCameraNormAndPlaneNorm(_trackedFeatures3D, normalOfPlaneMatrix, raux, taux);
                if (angle > 65 && angle < 110)
                {
                    var points2D = FindFaceCorners(_currGray, _trackedFeatures);
                    onPlaneFound(
                        normalOfPlaneMatrix.Clone(),
                        new VectorOfPoint3D32F(_trackedFeatures3D.ToArray()),
                        points2D.Points);

                    // Debug dump of the extracted face (hardcoded developer path).
                    var face = ExtractFace(img, points2D);
                    face.Save("C:\\Users\\zakharov\\Documents\\Repos\\Mine\\Rc\\src\\RubiksCube.OpenCV.TestCase\\extracted.jpg");
                }

                return true;
            }

            // Not enough features are coplanar; restore the original keypoint sets.
            _bootstrapKp = bootstrapKpOrig;
            _trackedFeatures = trackedFeaturesOrig;
        }
    }

    return false;
}
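// A minimal usage sketch, not from the original source: feeding frames to BootstrapTrack
// until bootstrapping completes. "Bootstrapper" stands in for whatever class actually hosts
// BootstrapTrack, and the frame source is left to the caller; only BootstrapTrack's
// signature and return value are taken from the code above.
public static void Bootstrap(Bootstrapper tracker, IEnumerable<Mat> frames)
{
    foreach (var frame in frames)
    {
        // The callback fires only once a well-oriented plane has been found; it receives
        // the plane normal, the coplanar 3D features and the 2D corners of the face.
        bool done = tracker.BootstrapTrack(frame, (normal, features3D, corners) =>
            Console.WriteLine($"Plane found: {features3D.Size} coplanar features, {corners.Length} corners"));

        if (done) break; // enough coplanar features; bootstrapping is finished
    }
}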
/// <summary>
/// Triangulates matched keypoints from two views and validates the result by
/// reprojecting the 3D points back into the bootstrap view.
/// </summary>
/// <param name="calibrationInfo">Camera intrinsics and distortion coefficients.</param>
/// <param name="trackedFeaturesKp">Keypoints tracked in the current frame.</param>
/// <param name="bootstrapKp">Corresponding keypoints from the bootstrap frame.</param>
/// <param name="p">Projection matrix of the first (bootstrap) camera.</param>
/// <param name="p1">Projection matrix of the second (current) camera.</param>
/// <returns>Triangulation result; on success it carries the filtered keypoints and 3D points.</returns>
public static TriangulateAndCheckReprojResult TriangulateAndCheckReproj(CameraCalibrationInfo calibrationInfo, VectorOfKeyPoint trackedFeaturesKp, VectorOfKeyPoint bootstrapKp, Matrix<double> p, Matrix<double> p1)
{
    var result = new TriangulateAndCheckReprojResult();

    var trackedFeaturesPoints = Utils.GetPointsVector(trackedFeaturesKp);
    var bootstrapPoints = Utils.GetPointsVector(bootstrapKp);

    // Undistort the matched points into normalized image coordinates.
    var normalizedTrackedPts = new VectorOfPointF();
    var normalizedBootstrapPts = new VectorOfPointF();
    CvInvoke.UndistortPoints(trackedFeaturesPoints, normalizedTrackedPts, calibrationInfo.Intrinsic, calibrationInfo.Distortion);
    CvInvoke.UndistortPoints(bootstrapPoints, normalizedBootstrapPts, calibrationInfo.Intrinsic, calibrationInfo.Distortion);

    // Triangulate to homogeneous coordinates (4xN), then convert to Euclidean 3D points.
    var pt3Dh = new Mat();
    CvInvoke.TriangulatePoints(p, p1, normalizedBootstrapPts, normalizedTrackedPts, pt3Dh);
    var pt3DhMatrix = new Matrix<float>(pt3Dh.Rows, pt3Dh.Cols, pt3Dh.DataPointer);

    var pt3DMat = new Mat();
    CvInvoke.ConvertPointsFromHomogeneous(pt3DhMatrix.Transpose(), pt3DMat);
    var pt3D = new Matrix<float>(pt3DMat.Rows, pt3DMat.Cols * pt3DMat.NumberOfChannels, pt3DMat.DataPointer);

    // Flag points with positive depth, i.e. in front of the camera.
    var statusArray = new byte[pt3D.Rows];
    for (int i = 0; i < pt3D.Rows; i++)
    {
        statusArray[i] = (pt3D[i, 2] > 0) ? (byte)1 : (byte)0;
    }

    var status = new VectorOfByte(statusArray);
    int count = CvInvoke.CountNonZero(status);

    double percentage = count / (double)pt3D.Rows;
    if (percentage > 0.75)
    {
        // Calculate the average reprojection error against the bootstrap view.
        var rvec = new VectorOfFloat(new float[] { 0, 0, 0 });
        var tvec = new VectorOfFloat(new float[] { 0, 0, 0 });
        var reprojectedPtSet1 = new VectorOfPointF();
        CvInvoke.ProjectPoints(pt3D, rvec, tvec, calibrationInfo.Intrinsic, calibrationInfo.Distortion, reprojectedPtSet1);

        double reprojErr = CvInvoke.Norm(reprojectedPtSet1, bootstrapPoints) / bootstrapPoints.Size;
        if (reprojErr < 5)
        {
            // Keep only the individual points whose reprojection error is below 20 pixels.
            statusArray = new byte[bootstrapPoints.Size];
            for (int i = 0; i < bootstrapPoints.Size; ++i)
            {
                var pointsDiff = Utils.SubstarctPoints(bootstrapPoints[i], reprojectedPtSet1[i]);
                var vectorOfPoint = new VectorOfPointF(new[] { pointsDiff });
                statusArray[i] = CvInvoke.Norm(vectorOfPoint) < 20.0 ? (byte)1 : (byte)0;
            }

            status = new VectorOfByte(statusArray);

            var trackedFeatures3D = new VectorOfPoint3D32F(Utils.Get3dPointsArray(pt3D));
            Utils.KeepVectorsByStatus(ref trackedFeaturesKp, ref trackedFeatures3D, status);

            result.Error = reprojErr;
            result.TrackedFeatures3D = new VectorOfPoint3D32F(trackedFeatures3D.ToArray());
            result.FilteredTrackedFeaturesKp = new VectorOfKeyPoint(trackedFeaturesKp.ToArray());
            result.FilteredBootstrapKp = new VectorOfKeyPoint(bootstrapKp.ToArray());
            result.Result = true;
        }

        rvec.Dispose();
        tvec.Dispose();
        reprojectedPtSet1.Dispose();
    }

    normalizedTrackedPts.Dispose();
    normalizedBootstrapPts.Dispose();
    pt3Dh.Dispose();
    pt3DhMatrix.Dispose();
    pt3DMat.Dispose();
    pt3D.Dispose();
    status.Dispose();

    return result;
}
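// A hedged usage sketch, not from the original source: testing one candidate camera pose
// with TriangulateAndCheckReproj. The convention assumed here is the standard two-view
// setup, P = [I|0] for the bootstrap camera and P1 = [R|t] for the current camera;
// the method name and the r/t parameters are illustrative.
public static bool TryCandidatePose(CameraCalibrationInfo calib, VectorOfKeyPoint tracked, VectorOfKeyPoint bootstrap, Matrix<double> r, Matrix<double> t)
{
    // P = [I|0] for the first (bootstrap) camera; a new Matrix is zero-initialized,
    // so only the diagonal needs to be set.
    var p = new Matrix<double>(3, 4);
    p[0, 0] = p[1, 1] = p[2, 2] = 1.0;

    // P1 = [R|t] for the second (current) camera.
    var p1 = new Matrix<double>(3, 4);
    for (int row = 0; row < 3; row++)
    {
        for (int col = 0; col < 3; col++) p1[row, col] = r[row, col];
        p1[row, 3] = t[row, 0];
    }

    var result = TriangulateAndCheckReproj(calib, tracked, bootstrap, p, p1);

    // True only if more than 75% of the points landed in front of the camera
    // and the average reprojection error stayed below 5 pixels.
    return result.Result;
}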