        public bool BootstrapTrack(Mat img, Action <Matrix <double>, VectorOfPoint3D32F, Point[]> onPlaneFound)
        {
            ValidateImages(_prevGray, img);

            CvInvoke.CvtColor(img, _currGray, ColorConversion.Bgr2Gray);

            ComputeOpticalFlowAndValidate(_prevGray, _currGray, ref _trackedFeatures, ref _bootstrapKp, img);

            Matrix <double> homography;

            ComputeHomographyAndValidate(ref _trackedFeatures, ref _bootstrapKp, out homography);

            var bootstrapKpOrig     = new VectorOfKeyPoint(_bootstrapKp.ToArray());
            var trackedFeaturesOrig = new VectorOfKeyPoint(_trackedFeatures.ToArray());

            //Attempt at 3D reconstruction (triangulation) if conditions are right
            var rigidT = CvInvoke.EstimateRigidTransform(Utils.GetPointsVector(_trackedFeatures).ToArray(), Utils.GetPointsVector(_bootstrapKp).ToArray(), false);
            var matrix = new Matrix <double>(rigidT.Rows, rigidT.Cols, rigidT.DataPointer);

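            //the 2x3 rigid transform is [A|t]; its third column is the translation (in pixels)
            //between the current and bootstrap keypoints, so a large norm means enough
            //baseline has accumulated for a stable triangulation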
            if (CvInvoke.Norm(matrix.GetCol(2)) > 100)
            {
                //camera motion is sufficient
                var result = OpenCvUtilities.CameraPoseAndTriangulationFromFundamental(_calibrationInfo, _trackedFeatures, _bootstrapKp, InitialP1);
                if (result.Result)
                {
                    _trackedFeatures = result.FilteredTrackedFeaturesKp;
                    _bootstrapKp     = result.FilteredBootstrapKp;

                    _trackedFeatures3D = result.TrackedFeatures3D;

                    int             numInliers;
                    Matrix <double> trackedFeatures3Dm;
                    Mat             mean;
                    Mat             eigenvectors;
                    Matrix <double> normalOfPlaneMatrix;
                    var             statusVector = ComputeNormalAndPlaneInliers(_trackedFeatures3D, out trackedFeatures3Dm, out mean, out eigenvectors, out numInliers, out normalOfPlaneMatrix);

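                    //heuristic: if fewer than 75% of the reconstructed 3D points lie on the
                    //dominant PCA plane, the scene is not planar enough yet - keep bootstrapping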
                    bool bootstrapping = numInliers / (double)_trackedFeatures3D.Size < 0.75;
                    if (!bootstrapping)
                    {
                        //enough features are coplanar, keep them and flatten them on the XY plane
                        Utils.KeepVectorsByStatus(ref _trackedFeatures, ref _trackedFeatures3D, statusVector);

                        //the PCA has the major axes of the plane
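                        //projecting into the PCA basis and zeroing the third coordinate
                        //(the axis of least variance, i.e. the plane normal) flattens the
                        //cloud onto the recovered plane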
                        var projected = new Mat();
                        CvInvoke.PCAProject(trackedFeatures3Dm, mean, eigenvectors, projected);
                        var projectedMatrix = new Matrix <double>(projected.Rows, projected.Cols, projected.DataPointer);
                        projectedMatrix.GetCol(2).SetValue(0);
                        projectedMatrix.CopyTo(trackedFeatures3Dm);

                        //InitialP1 = result.P2;

                        VectorOfFloat raux;
                        VectorOfFloat taux;

                        ComputeRotationAndTranslation(_trackedFeatures3D, _trackedFeatures, _calibrationInfo, out raux, out taux);

                        double angle = ComputeAngleBetweenCameraNormAndPlaneNorm(_trackedFeatures3D, normalOfPlaneMatrix, raux, taux);
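                        //gate on the angle between the camera direction and the plane normal;
                        //outside the 65-110 degree window the estimated plane is considered unreliable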
                        if (angle > 65 && angle < 110)
                        {
                            var points2D = FindFaceCorners(_currGray, _trackedFeatures);
                            onPlaneFound(normalOfPlaneMatrix.Clone(), new VectorOfPoint3D32F(_trackedFeatures3D.ToArray()), points2D.Points);

                            var face = ExtractFace(img, points2D);
                            face.Save("extracted.jpg"); //debug output
                        }

                        return(true);
                    }

                    //not enough features are coplanar - restore the original keypoints
                    _bootstrapKp     = bootstrapKpOrig;
                    _trackedFeatures = trackedFeaturesOrig;
                }
            }

            return(false);
        }
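
        //A minimal sketch, not the project's actual helper: one way the PCA plane fit inside
        //ComputeNormalAndPlaneInliers can work. It fits a plane through the 3D cloud
        //(the eigenvector with the least variance is the plane normal) and flags points whose
        //distance to the mean plane is below distThreshold. The threshold is an assumed
        //parameter here; the original C++ derives it from sqrt(pca.eigenvalues[2]).
        private static VectorOfByte PlaneInliersSketch(VectorOfPoint3D32F points3D, double distThreshold)
        {
            //pack the cloud into an N x 3 double matrix for PCACompute
            var data = new Matrix <double>(points3D.Size, 3);
            for (int i = 0; i < points3D.Size; i++)
            {
                data[i, 0] = points3D[i].X;
                data[i, 1] = points3D[i].Y;
                data[i, 2] = points3D[i].Z;
            }

            var mean         = new Mat();
            var eigenvectors = new Mat();
            CvInvoke.PCACompute(data, mean, eigenvectors);

            var meanMatrix = new Matrix <double>(mean.Rows, mean.Cols, mean.DataPointer);
            var eigMatrix  = new Matrix <double>(eigenvectors.Rows, eigenvectors.Cols, eigenvectors.DataPointer);

            //eigenvector rows are sorted by decreasing variance; row 2 is the unit plane normal
            var flags = new byte[points3D.Size];
            for (int i = 0; i < points3D.Size; i++)
            {
                //signed distance of the point to the mean plane along the normal
                double d = (data[i, 0] - meanMatrix[0, 0]) * eigMatrix[2, 0]
                           + (data[i, 1] - meanMatrix[0, 1]) * eigMatrix[2, 1]
                           + (data[i, 2] - meanMatrix[0, 2]) * eigMatrix[2, 2];
                flags[i] = Math.Abs(d) < distThreshold ? (byte)1 : (byte)0;
            }

            var status = new VectorOfByte();
            status.Push(flags);
            return(status);
        }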
        public bool BootstrapTrack(Mat img)
        {
            #region Trace

            Trace.WriteLine($"BootstrapTrack iteration ({ _trackedFeatures.Size}).");
            Trace.WriteLine("--------------------------");
            Trace.Indent();

            #endregion

            //Track detected features
            if (_prevGray.IsEmpty)
            {
                const string error = "Previous frame is empty. Bootstrap first.";
                Trace.TraceError(error);
                throw new Exception(error);
            }

            if (img.IsEmpty || img.NumberOfChannels != 3)
            {
                const string error = "Image is not valid (empty or wrong number of channels).";
                Trace.TraceError(error);
                throw new Exception(error);
            }

            var corners = new VectorOfPointF();
            var status  = new VectorOfByte();
            var errors  = new VectorOfFloat();

            var currGray = new Mat();
            CvInvoke.CvtColor(img, currGray, ColorConversion.Bgr2Gray);

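            //pyramidal Lucas-Kanade: 11x11 search window, up to pyramid level 3, and
            //termination after 20 iterations or an update smaller than 0.03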
            CvInvoke.CalcOpticalFlowPyrLK(_prevGray, currGray, Utils.GetPointsVector(_trackedFeatures), corners,
                                          status, errors, new Size(11, 11), 3, new MCvTermCriteria(20, 0.03));
            currGray.CopyTo(_prevGray);

            #region Trace

            Trace.WriteLine($"Tracked first point: ({_trackedFeatures[0].Point.X}, {_trackedFeatures[0].Point.Y}) / Found first corner = ({corners[0].X}, {corners[0].Y})");
            Trace.WriteLine($"Tracked second point: ({_trackedFeatures[1].Point.X}, {_trackedFeatures[1].Point.Y}) / Found second corner = ({corners[1].X}, {corners[1].Y})");
            Trace.WriteLine($"Tracked third point: ({_trackedFeatures[2].Point.X}, {_trackedFeatures[2].Point.Y}) / Found third corner = ({corners[2].X}, {corners[2].Y})");

            #endregion

            for (int j = 0; j < corners.Size; j++)
            {
                if (status[j] == 1)
                {
                    var p1 = new Point((int)_trackedFeatures[j].Point.X, (int)_trackedFeatures[j].Point.Y);
                    var p2 = new Point((int)corners[j].X, (int)corners[j].Y);

                    CvInvoke.Line(img, p1, p2, new MCvScalar(120, 10, 20));
                }
            }

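            //if more than 20% of the tracked points were lost, declare the optical flow unreliable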
            if (CvInvoke.CountNonZero(status) < status.Size * 0.8)
            {
                Trace.TraceError("Tracking failed.");
                throw new Exception("Tracking failed.");
            }

            _trackedFeatures = Utils.GetKeyPointsVector(corners);

            Utils.KeepVectorsByStatus(ref _trackedFeatures, ref _bootstrapKp, status);

            Trace.WriteLine($"{_trackedFeatures.Size} features survived optical flow.");

            if (_trackedFeatures.Size != _bootstrapKp.Size)
            {
                const string error = "Tracked features vector size is not equal to bootstrapped one.";
                Trace.TraceError(error);
                throw new Exception(error);
            }

            #region Trace

            Trace.WriteLine($"Bootstrap first point: ({_bootstrapKp[0].Point.X}, {_bootstrapKp[0].Point.Y}) / Found first corner = ({corners[0].X}, {corners[0].Y})");
            Trace.WriteLine($"Bootstrap second point: ({_bootstrapKp[1].Point.X}, {_bootstrapKp[1].Point.Y}) / Found second corner = ({corners[1].X}, {corners[1].Y})");
            Trace.WriteLine($"Bootstrap third point: ({_bootstrapKp[2].Point.X}, {_bootstrapKp[2].Point.Y}) / Found third corner = ({corners[2].X}, {corners[2].Y})");

            #endregion

            //verify features with a homography
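            //RANSAC fills inlierMask with one flag per correspondence; only matches that are
            //consistent with a single homography survive the filtering below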
            var inlierMask = new VectorOfByte();
            var homography = new Mat();
            if (_trackedFeatures.Size > 4)
            {
                CvInvoke.FindHomography(Utils.GetPointsVector(_trackedFeatures), Utils.GetPointsVector(_bootstrapKp), homography, HomographyMethod.Ransac, RansacThreshold, inlierMask);
            }

            int inliersNum = inlierMask.Size > 0 ? CvInvoke.CountNonZero(inlierMask) : 0;

            Trace.WriteLine($"{inliersNum} features survived homography.");

            if (inliersNum != _trackedFeatures.Size && inliersNum >= 4 && !homography.IsEmpty)
            {
                Utils.KeepVectorsByStatus(ref _trackedFeatures, ref _bootstrapKp, inlierMask);
            }
            else if (inliersNum < MinInliers)
            {
                Trace.TraceError("Not enough features survived homography.");
                return(false);
            }

            //Attempt at 3D reconstruction (triangulation) if conditions are right
            var rigidT = CvInvoke.EstimateRigidTransform(Utils.GetPointsVector(_trackedFeatures).ToArray(), Utils.GetPointsVector(_bootstrapKp).ToArray(), false);
            var matrix = new Matrix <double>(rigidT.Rows, rigidT.Cols, rigidT.DataPointer);

            #region Trace

            Trace.WriteLine($"Track first point: ({_trackedFeatures[0].Point.X}, {_trackedFeatures[0].Point.Y}) / Bootstrap first point = ({_bootstrapKp[0].Point.X}, {_bootstrapKp[0].Point.Y})");
            Trace.WriteLine($"Track 10th point: ({_trackedFeatures[10].Point.X}, {_trackedFeatures[10].Point.Y}) / Bootstrap 10th point = ({_bootstrapKp[10].Point.X}, {_bootstrapKp[10].Point.Y})");
            Trace.WriteLine($"Track last point: ({_trackedFeatures[_trackedFeatures.Size - 1].Point.X}, {_trackedFeatures[_trackedFeatures.Size - 1].Point.Y}" +
                            $") / Bootstrap third point = ({_bootstrapKp[_bootstrapKp.Size - 1].Point.X}, {_bootstrapKp[_bootstrapKp.Size - 1].Point.Y})");

            Trace.WriteLine($"Rigid matrix: [ [ {matrix[0, 0]}, {matrix[0, 1]}, {matrix[0, 2]} ] [ {matrix[1, 0]}, {matrix[1, 1]}, {matrix[1, 2]} ] ].");
            Trace.WriteLine($"Rigid: {CvInvoke.Norm(matrix.GetCol(2))}");

            #endregion

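            //same sufficient-motion gate as in the callback overload above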
            if (CvInvoke.Norm(matrix.GetCol(2)) > 100)
            {
                //camera motion is sufficient
                var p1 = new Matrix <double>(3, 4);
                p1.SetIdentity();
                var result = OpenCvUtilities.CameraPoseAndTriangulationFromFundamental(_calibrationInfo, _trackedFeatures, _bootstrapKp, p1);

                if (result.Result)
                {
                    _trackedFeatures = result.FilteredTrackedFeaturesKp;
                    _bootstrapKp     = result.FilteredBootstrapKp;

                    _trackedFeatures3D = result.TrackedFeatures3D;
                    var trackedFeatures3Dm = Utils.Get3dPointsMat(_trackedFeatures3D);

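                    //PCA over the 3D cloud: eigenvector rows are sorted by decreasing variance,
                    //so the third row spans the plane normal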
                    var eigenvectors = new Mat();
                    var mean         = new Mat();
                    CvInvoke.PCACompute(trackedFeatures3Dm, mean, eigenvectors);
                    var eigenvectorsMatrix = new Matrix <double>(eigenvectors.Rows, eigenvectors.Cols, eigenvectors.DataPointer);

                    var normalOfPlane = eigenvectorsMatrix.GetRow(2).ToUMat().ToMat(AccessType.Fast);
                    CvInvoke.Normalize(normalOfPlane, normalOfPlane);

                    var normalOfPlaneMatrix = new Matrix <double>(normalOfPlane.Rows, normalOfPlane.Cols, normalOfPlane.DataPointer);
                    Trace.WriteLine($"normal of plane: ({normalOfPlaneMatrix[0, 0]}, {normalOfPlaneMatrix[0, 1]}, {normalOfPlaneMatrix[0, 2]})");
                    //TODO: port the remaining C++ plane-inlier logic:
                    //cv::Vec3d x0 = pca.mean;
                    //double p_to_plane_thresh = sqrt(pca.eigenvalues.at<double>(2));
                }

                #region Trace

                Trace.Unindent();
                Trace.WriteLine("--------------------------");

                #endregion

                return(true);
            }

            #region Trace

            Trace.Unindent();
            Trace.WriteLine("--------------------------");

            #endregion

            return(false);
        }