Example #1
        private void TrackFeatures(Image <Gray, byte> grayImage)
        {
            if (framesProcessed == 1)
            {
                // First frame: detect features and allocate the tracking buffers.
                keyPoints   = featureDetector.Detect(grayImage);
                kpVector    = new VectorOfPointF((keyPoints.Select(p => p.Point).ToArray()));
                nextVector  = new VectorOfPointF(kpVector.Size);
                statusArray = new VectorOfByte(kpVector.Size);
                errArray    = new VectorOfFloat(kpVector.Size);
            }
            else if (framesProcessed > 2)
            {
                // Later frames: continue tracking from the points found in the previous call.
                kpVector = nextVector;
            }

            // Periodically reseed the tracked points from a regular grid.
            if (framesProcessed % 50 == 0)
            {
                kpVector = CreateGrid(currentImage);
            }

            if (framesProcessed >= 2)
            {
                CvInvoke.CalcOpticalFlowPyrLK(lastGray, grayImage, kpVector, nextVector, statusArray, errArray, new Size(trackBar1.Value * 2 + 2, trackBar1.Value * 2 + 2), trackBar4.Value, new MCvTermCriteria(trackBar2.Value, trackBar3.Value / 100.0));
                DrawPoints(nextVector, Color.Blue);
            }
        }
Example #2
        public PointF[] FPoints(MKeyPoint[] GFP1, Image <Bgr, byte> image1, Image <Bgr, byte> image2)
        {
            PointF[] srcPoints = new PointF[GFP1.Length];
            for (int i = 0; i < GFP1.Length; i++)
            {
                srcPoints[i] = GFP1[i].Point;
            }

            PointF[] destPoints;
            byte[]   status;
            float[]  trackErrors;

            CvInvoke.CalcOpticalFlowPyrLK(image1.Convert <Gray, byte>().Mat,
                                          image2.Convert <Gray, byte>().Mat,
                                          srcPoints,
                                          new Size(20, 20),
                                          5,
                                          new MCvTermCriteria(20, 1),
                                          out destPoints,
                                          out status,
                                          out trackErrors
                                          );

            return(destPoints);
        }
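A minimal usage sketch for the method above (hypothetical caller; img1, img2 and detector are illustrative names, with detector being a GFTTDetector as in the other examples):

        // Detect features on the first frame, track them into the second frame,
        // then mark the tracked positions on the second image.
        MKeyPoint[] keyPoints = detector.Detect(img1.Convert <Gray, byte>().Mat);
        PointF[]    tracked   = FPoints(keyPoints, img1, img2);
        foreach (PointF p in tracked)
        {
            CvInvoke.Circle(img2, Point.Round(p), 3, new Bgr(Color.Blue).MCvScalar, 2);
        }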
Example #3
        public Mat GetTransform()
        {
            Mat status = new Mat();
            Mat err    = new Mat();
            Mat outs   = new Mat();

            CvInvoke.CalcOpticalFlowPyrLK(bgImage, curFrame, corners, outs, status, err, new Size(15, 15), 2,
                                          new Emgu.CV.Structure.MCvTermCriteria());
            return(CvInvoke.GetPerspectiveTransform(corners, outs));
        }
Example #4
        private void button2_Click(object sender, EventArgs e)
        {
            GFTTDetector detector = new GFTTDetector(40, 0.01, 5, 3, true);

            MKeyPoint[] GFP1 = detector.Detect(baseImg.Convert <Gray, byte>().Mat);


            //build an array of feature points of the source image (positions only)
            PointF[] srcPoints = new PointF[GFP1.Length];
            for (int i = 0; i < GFP1.Length; i++)
            {
                srcPoints[i] = GFP1[i].Point;
            }
            PointF[] destPoints;                       //positions of the points in the transformed image
            byte[]   status;                           //point status (found / not found)
            float[]  trackErrors;                      //tracking errors
                                                       //compute the feature point positions in the new image with the Lucas-Kanade method
            CvInvoke.CalcOpticalFlowPyrLK(
                baseImg.Convert <Gray, byte>().Mat,    //source image
                twistedImg.Convert <Gray, byte>().Mat, //transformed image
                srcPoints,                             //feature points of the source image
                new Size(20, 20),                      //search window size
                5,                                     //pyramid levels
                new MCvTermCriteria(20, 1),            //termination criteria for the optical flow computation
                out destPoints,                        //feature point positions in the new image
                out status,                            //contains 1 for elements where the flow was found
                out trackErrors                        //tracking errors
                );

            //compute the homography matrix
            Mat homographyMatrix = CvInvoke.FindHomography(destPoints, srcPoints,
                                                           RobustEstimationAlgorithm.LMEDS);
            var destImage = new Image <Bgr, byte>(baseImg.Size);

            CvInvoke.WarpPerspective(twistedImg, destImage, homographyMatrix, destImage.Size);


            //var output1 = baseImg.Clone();

            //foreach (MKeyPoint p in GFP1)
            //{
            //    CvInvoke.Circle(output1, Point.Round(p.Point), 3, new Bgr(Color.Blue).MCvScalar, 2);
            //}
            //imageBox1.Image = output1.Resize(640, 480, Inter.Linear);

            ////var output2 = twistedImg.Clone();

            //foreach (PointF p in destPoints)
            //{
            //    CvInvoke.Circle(destImage, Point.Round(p), 3, new Bgr(Color.Blue).MCvScalar, 2);
            //}
            imageBox2.Image = destImage.Resize(640, 480, Inter.Linear);
        }
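A possible refinement, not part of the original sample: since status marks which points were actually tracked, the point pairs could be filtered before the homography is estimated. A minimal sketch assuming the srcPoints, destPoints and status variables from the code above (requires System.Linq):

            // Keep only the point pairs whose flow was found (status == 1)
            // before estimating the homography.
            PointF[] srcTracked  = srcPoints.Where((p, i) => status[i] == 1).ToArray();
            PointF[] destTracked = destPoints.Where((p, i) => status[i] == 1).ToArray();
            Mat filteredHomography = CvInvoke.FindHomography(destTracked, srcTracked,
                                                             RobustEstimationAlgorithm.LMEDS);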
Example #5
        public Image <Bgr, byte> GFTT()
        {
            GFTTDetector detector = new GFTTDetector(40, 0.01, 5, 3, true);

            MKeyPoint[] GFP1 = detector.Detect(baseImage.Convert <Gray, byte>().Mat);

            //build an array of feature points of the source image (positions only)
            PointF[] srcPoints = new PointF[GFP1.Length];
            for (int i = 0; i < GFP1.Length; i++)
            {
                srcPoints[i] = GFP1[i].Point;
            }
            PointF[] destPoints;                         //positions of the points in the transformed image
            byte[]   status;                             //point status (found / not found)
            float[]  trackErrors;                        //tracking errors
                                                         //compute the feature point positions in the new image with the Lucas-Kanade method
            CvInvoke.CalcOpticalFlowPyrLK(
                baseImage.Convert <Gray, byte>().Mat,    //source image
                twistedImage.Convert <Gray, byte>().Mat, //transformed image
                srcPoints,                               //feature points of the source image
                new Size(20, 20),                        //search window size
                5,                                       //pyramid levels
                new MCvTermCriteria(20, 1),              //termination criteria for the optical flow computation
                out destPoints,                          //feature point positions in the new image
                out status,                              //contains 1 for elements where the flow was found
                out trackErrors                          //tracking errors
                );


            var output = baseImage.Clone();

            foreach (MKeyPoint p in GFP1)
            {
                CvInvoke.Circle(output, Point.Round(p.Point), 3, new Bgr(Color.Blue).MCvScalar, 2);
            }

            var output2 = twistedImage.Clone();

            foreach (MKeyPoint p in GFP1)
            {
                CvInvoke.Circle(output2, Point.Round(p.Point), 3, new Bgr(Color.Blue).MCvScalar, 2);
            }

            return(output.Resize(640, 480, Inter.Linear));
        }
Example #6
        private void opticalFlow()
        {
            var a = new MCvTermCriteria(100);

            byte[]   status2;
            float[]  errors2;
            PointF[] corners2;
            var      vectors2  = vector2Point(vectors);
            var      keypoints = fastDetector.Detect(inputGrayImage, null);
            var      corners   = new VectorOfKeyPoint(keypoints);

            corners2 = vector2Point(corners);

            CvInvoke.CalcOpticalFlowPyrLK(inputGrayImagePrevious, inputGrayImage, vectors2, new Size(10, 10), 3, a, out corners2, out status2, out errors2);
            var matches = countNonZero(status2);

            labelStatus.Text = "Found =" + Convert.ToString(matches);
            labelTotal.Text  = "Total =" + Convert.ToString(errors2.Count());
        }
Example #7
        public Image <Bgr, byte> ReturnLucas(Image <Bgr, byte> image, Image <Bgr, byte> twistedImg, out Image <Bgr, byte> defImg)
        {
            GFTTDetector detector = new GFTTDetector(40, 0.01, 5, 3, true);

            MKeyPoint[] GFP1 = detector.Detect(image.Convert <Gray, byte>().Mat);
            foreach (MKeyPoint p in GFP1)
            {
                CvInvoke.Circle(image, Point.Round(p.Point), 5, new Bgr(Color.LawnGreen).MCvScalar, 2);
            }
            defImg = image;

            PointF[] srcPoints = new PointF[GFP1.Length];

            for (int i = 0; i < GFP1.Length; i++)
            {
                srcPoints[i] = GFP1[i].Point;
            }
            PointF[] destPoints;                       //positions of the points in the transformed image
            byte[]   status;                           //point status (found / not found)
            float[]  trackErrors;                      //tracking errors
                                                       //compute the feature point positions in the new image with the Lucas-Kanade method
            CvInvoke.CalcOpticalFlowPyrLK(
                image.Convert <Gray, byte>().Mat,      //source image
                twistedImg.Convert <Gray, byte>().Mat, //transformed image
                srcPoints,                             //feature points of the source image
                new Size(20, 20),                      //search window size
                5,                                     //pyramid levels
                new MCvTermCriteria(20, 1),            //termination criteria for the optical flow computation
                out destPoints,                        //feature point positions in the new image
                out status,                            //contains 1 for elements where the flow was found
                out trackErrors                        //tracking errors
                );

            //for (int i = 0; i < destPoints.Length; i++)
            //    srcPoints[i] = GFP1[i].Point;
            foreach (PointF p in destPoints)
            {
                CvInvoke.Circle(twistedImg, Point.Round(p), 5, new Bgr(Color.LawnGreen).MCvScalar, 2);
            }
            return(twistedImg);
        }
Example #8
        private static void ComputeOpticalFlowAndValidate(Mat prevGray, Mat currGray, ref VectorOfKeyPoint trackedFeatures, ref VectorOfKeyPoint bootstrapKp, Mat img = null)
        {
            var corners = new VectorOfPointF();
            var status  = new VectorOfByte();
            var errors  = new VectorOfFloat();

            CvInvoke.CalcOpticalFlowPyrLK(prevGray, currGray, Utils.GetPointsVector(trackedFeatures), corners,
                                          status, errors, new Size(11, 11), 3, new MCvTermCriteria(30, 0.01));
            currGray.CopyTo(prevGray);

            if (img != null)
            {
                for (int j = 0; j < corners.Size; j++)
                {
                    if (status[j] == 1)
                    {
                        CvInvoke.Line(img, new Point((int)trackedFeatures[j].Point.X, (int)trackedFeatures[j].Point.Y),
                                      new Point((int)corners[j].X, (int)corners[j].Y), new MCvScalar(120, 10, 20));
                    }
                }
            }

            if (CvInvoke.CountNonZero(status) < status.Size * 0.8)
            {
                throw new Exception("Tracking failed.");
            }

            trackedFeatures = Utils.GetKeyPointsVector(corners);

            Utils.KeepVectorsByStatus(ref trackedFeatures, ref bootstrapKp, status);

            if (trackedFeatures.Size != bootstrapKp.Size)
            {
                const string error = "Tracked features vector size is not equal to bootstrapped one.";
                throw new Exception(error);
            }
        }
Example #9
        // Calculate Optical Flow Using PyrLk Algorithm
        public void PyrLkOpticalFlow(Image <Gray, byte> prevFrame, Image <Gray, byte> nextFrame)
        {
            //Get the Optical flow of L-K feature
            Image <Gray, Byte> mask     = prevFrame.Clone();
            GFTTDetector       detector = new GFTTDetector(30, 0.01, 10, 3, false, 0.04);

            MKeyPoint[]     fp1      = detector.Detect(prevFrame, null);
            VectorOfPointF  vp1      = new VectorOfPointF(fp1.Select(x => x.Point).ToArray());
            VectorOfPointF  vp2      = new VectorOfPointF(vp1.Size);
            VectorOfByte    vstatus  = new VectorOfByte(vp1.Size);
            VectorOfFloat   verr     = new VectorOfFloat(vp1.Size);
            Size            winsize  = new Size(prevFrame.Width, prevFrame.Height);
            int             maxLevel = 1; // if 0, pyramids are not used (single level only)
            MCvTermCriteria criteria = new MCvTermCriteria(10, 1);

            try
            {
                CvInvoke.CalcOpticalFlowPyrLK(prevFrame, nextFrame, vp1, vp2, vstatus, verr, winsize, maxLevel, criteria);
            }
            catch (Exception e)
            {
                Console.WriteLine(e.Message);
            }
        }
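Example #10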
        public void Bootstrap_Track_Logic_Test()
        {
            var capture = new Capture($@"{TestCaseProjectPath}\Videos\cube2.avi");

            //var capture = new Capture(@"C:\Users\zakharov\Documents\Repos\Mine\Rc\src\RubiksCube.OpenCV.TestCase\Videos\cube2.avi");
            for (int i = 0; i < 40; i++)
            {
                capture.QueryFrame();
            }

            var prevGray = capture.QueryFrame();

            CvInvoke.CvtColor(prevGray, prevGray, ColorConversion.Bgr2Gray);

            var currentGray = capture.QueryFrame();

            CvInvoke.CvtColor(currentGray, currentGray, ColorConversion.Bgr2Gray);

            var bootstrapKp = new VectorOfKeyPoint();

            new ORBDetector().DetectRaw(prevGray, bootstrapKp);

            var trackedFeatures = new VectorOfKeyPoint(bootstrapKp.ToArray());

            //-------------------------------------------------------------------------

            var pointComparer = Comparer <PointF> .Create((p1, p2) => Math.Abs(p1.X - p2.X) < 0.0001f && Math.Abs(p1.Y - p2.Y) < 0.0001f? 0 : 1);

            var point3DComparer = Comparer <MCvPoint3D32f> .Create((p1, p2) => Math.Abs(p1.X - p2.X) < 0.0001f && Math.Abs(p1.Y - p2.Y) < 0.0001f && Math.Abs(p1.Z - p2.Z) < 0.0001f? 0 : 1);

            var matrixComparer = Comparer <double> .Create((x, y) => Math.Abs(x - y) < 0.0001f? 0 : 1);

            for (int i = 41; i <= 95; i++)
            {
                var bootstrapPointsBeforeOpticalFlowCplusPlus = GetPoints($"I = {i}txt - Bootstrap points before optical flow.txt");
                var trackedPointsBeforeOpticalFlowCplusPlus   = GetPoints($"I = {i}txt - Tracked points before optical flow.txt");
                var bootstrapPointsAfterOpticalFlowCplusPlus  = GetPoints($"I = {i}txt - Bootstrap points after optical flow.txt");
                var trackedPointsAfterOpticalFlowCplusPlus    = GetPoints($"I = {i}txt - Tracked points after optical flow.txt");
                var bootstrapPointsAfterHomographyCplusPlus   = GetPoints($"I = {i}txt - Bootstrap points after homography.txt");
                var trackedPointsAfterHomographyCplusPlus     = GetPoints($"I = {i}txt - Tracked points after homography.txt");

                var homographyCplusPlus     = Getmatrix3X3($"I = {i}txt - Homography.txt");
                var homographyMaskCplusPlus = GetByteVector($"I = {i}txt - Homography mask.txt");

                var corners = new VectorOfPointF();
                var status  = new VectorOfByte();
                var errors  = new VectorOfFloat();

                CollectionAssert.AreEqual(Utils.GetPointsVector(bootstrapKp).ToArray(), bootstrapPointsBeforeOpticalFlowCplusPlus.ToArray(), pointComparer);
                CollectionAssert.AreEqual(Utils.GetPointsVector(trackedFeatures).ToArray(), trackedPointsBeforeOpticalFlowCplusPlus.ToArray(), pointComparer);

                CvInvoke.CalcOpticalFlowPyrLK(prevGray, currentGray, Utils.GetPointsVector(trackedFeatures), corners,
                                              status, errors, new Size(11, 11), 3, new MCvTermCriteria(30, 0.01));
                currentGray.CopyTo(prevGray);

                if (CvInvoke.CountNonZero(status) < status.Size * 0.8)
                {
                    throw new Exception("Tracking failed.");
                }

                trackedFeatures = Utils.GetKeyPointsVector(corners);
                Utils.KeepVectorsByStatus(ref trackedFeatures, ref bootstrapKp, status);

                CollectionAssert.AreEqual(Utils.GetPointsVector(bootstrapKp).ToArray(), bootstrapPointsAfterOpticalFlowCplusPlus.ToArray(), pointComparer);
                CollectionAssert.AreEqual(Utils.GetPointsVector(trackedFeatures).ToArray(), trackedPointsAfterOpticalFlowCplusPlus.ToArray(), pointComparer);

                if (trackedFeatures.Size != bootstrapKp.Size)
                {
                    const string error = "Tracked features vector size is not equal to bootstrapped one.";
                    throw new Exception(error);
                }

                //verify features with a homography
                var inlierMask = new VectorOfByte();
                var homography = new Mat();
                if (trackedFeatures.Size > 4)
                {
                    CvInvoke.FindHomography(Utils.GetPointsVector(trackedFeatures), Utils.GetPointsVector(bootstrapKp), homography, HomographyMethod.Ransac, 0.99,
                                            inlierMask);
                }

                var homographyMatrix = new Matrix <double>(homography.Rows, homography.Cols, homography.DataPointer);
                CollectionAssert.AreEqual(homographyMatrix.Data, homographyCplusPlus.Data, matrixComparer);

                int inliersNum = CvInvoke.CountNonZero(inlierMask);
                CollectionAssert.AreEqual(inlierMask.ToArray(), homographyMaskCplusPlus.ToArray());

                if (inliersNum != trackedFeatures.Size && inliersNum >= 4 && !homography.IsEmpty)
                {
                    Utils.KeepVectorsByStatus(ref trackedFeatures, ref bootstrapKp, inlierMask);
                }
                else if (inliersNum < 10)
                {
                    throw new Exception("Not enough features survived homography.");
                }

                CollectionAssert.AreEqual(Utils.GetPointsVector(bootstrapKp).ToArray(), bootstrapPointsAfterHomographyCplusPlus.ToArray(), pointComparer);
                CollectionAssert.AreEqual(Utils.GetPointsVector(trackedFeatures).ToArray(), trackedPointsAfterHomographyCplusPlus.ToArray(), pointComparer);

                var bootstrapKpOrig     = new VectorOfKeyPoint(bootstrapKp.ToArray());
                var trackedFeaturesOrig = new VectorOfKeyPoint(trackedFeatures.ToArray());

                //TODO: Compare all these to c++ version
                //Attempt at 3D reconstruction (triangulation) if conditions are right
                var rigidT = CvInvoke.EstimateRigidTransform(Utils.GetPointsVector(trackedFeatures).ToArray(), Utils.GetPointsVector(bootstrapKp).ToArray(), false);
                var matrix = new Matrix <double>(rigidT.Rows, rigidT.Cols, rigidT.DataPointer);

                if (CvInvoke.Norm(matrix.GetCol(2)) > 100)
                {
                    var points3DCplusPlus      = GetPoints3d($"I = {i}txt - 3d points.txt");
                    var eigenvectorsCplusPlus  = Getmatrix3X3($"I = {i}txt - eigenvectors.txt");
                    var normalOfPlaneCplusPlus = GetDoubleArray($"I = {i}txt - normal of plane.txt");

                    //camera motion is sufficient
                    var p1Init = new Matrix <double>(3, 4);
                    p1Init.SetIdentity();
                    var result = OpenCvUtilities.CameraPoseAndTriangulationFromFundamental(_calibration, trackedFeatures, bootstrapKp, p1Init);

                    trackedFeatures = result.FilteredTrackedFeaturesKp;
                    bootstrapKp     = result.FilteredBootstrapKp;

                    if (result.Result)
                    {
                        double pToPlaneTrashCplusPlus = GetDouble($"I = {i}txt - p_to_plane_thresh.txt");
                        int    numInliersCplusPlus    = GetInt($"I = {i}txt - num inliers.txt");
                        var    statusArrCplusPlus     = GetByteVector($"I = {i}txt - status arr.txt");

                        var trackedFeatures3D = result.TrackedFeatures3D;

                        CollectionAssert.AreEqual(trackedFeatures3D.ToArray(), points3DCplusPlus.ToArray(), point3DComparer);

                        //var trackedFeatures3Dm = Utils.Get3dPointsMat(trackedFeatures3D);
                        var tf3D = new double[trackedFeatures3D.Size, 3];
                        var trackedFeatures3Dm = new Matrix <double>(trackedFeatures3D.Size, 3);
                        for (int k = 0; k < trackedFeatures3D.Size; k++)
                        {
                            trackedFeatures3Dm[k, 0] = trackedFeatures3D[k].X;
                            trackedFeatures3Dm[k, 1] = trackedFeatures3D[k].Y;
                            trackedFeatures3Dm[k, 2] = trackedFeatures3D[k].Z;

                            tf3D[k, 0] = trackedFeatures3D[k].X;
                            tf3D[k, 1] = trackedFeatures3D[k].Y;
                            tf3D[k, 2] = trackedFeatures3D[k].Z;
                        }

                        var eigenvectors = new Mat();
                        var mean         = new Mat();
                        CvInvoke.PCACompute(trackedFeatures3Dm, mean, eigenvectors);
                        var eigenvectorsMatrix = new Matrix <double>(eigenvectors.Rows, eigenvectors.Cols, eigenvectors.DataPointer);

                        CollectionAssert.AreEqual(eigenvectorsMatrix.Data, eigenvectorsCplusPlus.Data, matrixComparer);

                        var method = PrincipalComponentMethod.Center;
                        var pca    = new PrincipalComponentAnalysis(method);
                        pca.Learn(tf3D.ToJagged());

                        var meanMatrix = new Matrix <double>(mean.Rows, mean.Cols, mean.DataPointer);
                        CollectionAssert.AreEqual(meanMatrix.Data.ToJagged()[0], pca.Means, matrixComparer);

                        int numInliers    = 0;
                        var normalOfPlane = eigenvectorsMatrix.GetRow(2).ToUMat().ToMat(AccessType.Fast);
                        CvInvoke.Normalize(normalOfPlane, normalOfPlane);

                        var normalOfPlaneMatrix = new Matrix <double>(normalOfPlane.Rows, normalOfPlane.Cols, normalOfPlane.DataPointer);
                        var normalOfPlaneArray  = new[] { normalOfPlaneMatrix[0, 0], normalOfPlaneMatrix[0, 1], normalOfPlaneMatrix[0, 2] };

                        CollectionAssert.AreEqual(normalOfPlaneArray, normalOfPlaneCplusPlus, matrixComparer);

                        double pToPlaneThresh = Math.Sqrt(pca.Eigenvalues.ElementAt(2));

                        Assert.AreEqual(pToPlaneTrashCplusPlus, pToPlaneThresh, 0.0001);

                        var statusArray = new byte[trackedFeatures3D.Size];
                        for (int k = 0; k < trackedFeatures3D.Size; k++)
                        {
                            var t1 = new double[] { trackedFeatures3D[k].X, trackedFeatures3D[k].Y, trackedFeatures3D[k].Z };
                            var t2 = t1.Subtract(pca.Means);
                            var w  = new Matrix <double>(new[, ] {
                                { t2[0], t2[1], t2[2] }
                            });
                            double d = Math.Abs(normalOfPlane.Dot(w));
                            if (d < pToPlaneThresh)
                            {
                                numInliers++;
                                statusArray[k] = 1;
                            }
                        }

                        Assert.AreEqual(numInliersCplusPlus, numInliers);

                        var statusVector = new VectorOfByte(statusArray);
                        CollectionAssert.AreEqual(statusArrCplusPlus.ToArray(), statusVector.ToArray());

                        bool bootstrapping = numInliers / (double)trackedFeatures3D.Size < 0.75;
                        if (!bootstrapping)
                        {
                            //enough features are coplanar, keep them and flatten them on the XY plane
                            Utils.KeepVectorsByStatus(ref trackedFeatures, ref trackedFeatures3D, statusVector);

                            //the PCA has the major axes of the plane
                            var projected = new Mat();
                            CvInvoke.PCAProject(trackedFeatures3Dm, mean, eigenvectors, projected);
                            var projectedMatrix = new Matrix <double>(projected.Rows, projected.Cols, projected.DataPointer);
                            projectedMatrix.GetCol(2).SetValue(0);
                            projectedMatrix.CopyTo(trackedFeatures3Dm);
                        }
                        else
                        {
                            bootstrapKp     = bootstrapKpOrig;
                            trackedFeatures = trackedFeaturesOrig;
                        }
                    }
                }

                currentGray = capture.QueryFrame();
                CvInvoke.CvtColor(currentGray, currentGray, ColorConversion.Bgr2Gray);
            }
        }
Example #11
        private void ProcessFrame(object sender, EventArgs arg)
        {
            if (capture != null && capture.Ptr != IntPtr.Zero)
            {
                //Retrieve video from the camera and store in frame
                Mat frame = new Mat();
                capture.Retrieve(frame);

                //Copy frame to Result Frame
                Mat ResultFrame = new Mat();
                frame.CopyTo(ResultFrame);

                //Create a nextFrame and convert the captured frame to grayscale
                nextFrame = new Mat();
                CvInvoke.CvtColor(frame, nextFrame, Emgu.CV.CvEnum.ColorConversion.Bgr2Gray);

                //If prevFrame is not empty, run motion detection against the new frame
                if (!prevFrameEmpty)
                {
                    //Detect keypoints using the good-features-to-track detector
                    VectorOfKeyPoint prevFeatures = new VectorOfKeyPoint(gFTTDetector.Detect(prevFrame));

                    //Store keypoint positions in a PointF array
                    PointF[] prevPts = new PointF[prevFeatures.Size];
                    for (int i = 0; i < prevFeatures.Size; i++)
                    {
                        prevPts[i] = prevFeatures[i].Point;
                    }

                    //Declare variables to store results of optical flow
                    PointF[] nextPts;
                    byte[]   status;
                    float[]  errors;

                    //Run Lucas-Kanade Optical Flow by comparing the previous and the next frame
                    CvInvoke.CalcOpticalFlowPyrLK(prevFrame, nextFrame, prevPts, new Size(25, 25), 1, new MCvTermCriteria(20, 0.03), out nextPts, out status, out errors);

                    //Display results of motion by drawing circles
                    for (int i = 0; i < status.Length; i++)
                    {
                        Point  prevPt = new Point((int)prevPts[i].X, (int)prevPts[i].Y);
                        Point  nextPt = new Point((int)nextPts[i].X, (int)nextPts[i].Y);
                        double length = Math.Sqrt(Math.Pow(prevPt.X - nextPt.X, 2) + Math.Pow(prevPt.Y - nextPt.Y, 2));
                        if (status[i] == 1 && length > 3)
                        {
                            CvInvoke.Circle(ResultFrame, nextPt, 1, new MCvScalar(0, 255, 0), 2);
                        }
                    }
                    //Copy next frame to previous frame for next motion
                    prevFrame      = nextFrame.Clone();
                    prevFrameEmpty = false;
                }
                else if (prevFrameEmpty)
                {
                    prevFrame      = nextFrame.Clone();
                    prevFrameEmpty = false;
                }

                //Display the results in the picture boxes
                captureImageBox.Image = frame.ToImage <Bgr, byte>().Bitmap;
                resultImageBox.Image  = ResultFrame.ToImage <Bgr, byte>().Bitmap;
            }
        }
Example #12
    public override void CalculateWeights(Mat image, ImageFeatureMap target)
    {
        DetectionTime = 0;
        if (!Enabled)
        {
            return;
        }
        byte[]   status;
        float[]  errTracker;
        PointF[] features;



        float W = image.Width;
        float H = image.Height;

        if (_isFirstFrame ||
            _prevImage.Width != image.Width ||
            _prevImage.Height != image.Height)
        {
            _prevImage    = image.Clone();
            _isFirstFrame = false;
            return;
        }

        DateTime t = DateTime.Now;

        if (_currPoints == null || _currPoints.Length < 50 ||
            (t - _time).TotalSeconds > Params.OFParameters.FeaturesUpdateTime)
        {
            _time = t;
            UnityEngine.Debug.Log("Recalculating feature points");

            GFTTDetector _GFTTdetector = new GFTTDetector(Params.OFParameters.MaxFeaturesCount);
            MKeyPoint[]  featPoints    = _GFTTdetector.Detect(image, null);

            _prevPoints = new PointF[featPoints.Length];
            int i = 0;
            foreach (var k in featPoints)
            {
                _prevPoints [i] = k.Point;
                ++i;
            }

            _currPoints = _prevPoints;
        }

        Stopwatch watch;

        watch = Stopwatch.StartNew();
        try{
            _criteria.Type    = Params.OFParameters.CriteriaType;
            _criteria.MaxIter = Params.OFParameters.Iterations;
            _criteria.Epsilon = Params.OFParameters.Epsilon;
            CvInvoke.CalcOpticalFlowPyrLK(_prevImage, image, _prevPoints, new Size((int)Params.OFParameters.SearchWindow.x, (int)Params.OFParameters.SearchWindow.y),
                                          Params.OFParameters.Level, _criteria, out features, out status, out errTracker);

            //calculate homography matrix
            CvInvoke.FindHomography(_prevPoints, features, _homography, Emgu.CV.CvEnum.HomographyMethod.Default);
        }catch (Exception e) {
            UnityEngine.Debug.Log(e.Message);
            return;
        }
        watch.Stop();
        DetectionTime = watch.ElapsedMilliseconds;

        //calculate homography transformation, and remove it from points
        Matrix4x4 m = new Matrix4x4();

        m.SetRow(0, new Vector4((float)_homography[0, 0], (float)_homography[0, 1], 0, (float)_homography[0, 2]));
        m.SetRow(1, new Vector4((float)_homography[1, 0], (float)_homography[1, 1], 0, (float)_homography[1, 2]));
        m.SetRow(2, new Vector4(0, 0, 1, 0));
        m.SetRow(3, new Vector4((float)_homography[2, 0], (float)_homography[2, 1], 0, (float)_homography[2, 2]));
        Matrix4x4 homographyInverse = Matrix4x4.Inverse(m);         //get the inverse


        //next, fill weight map


        Vector2 direction = new Vector2((float)_homography [0, 2], (float)_homography [1, 2]);

        direction.Normalize();
        _opticalFlow.Clear();
        int count = 0;

        for (int i = 0; i < features.Length; ++i)
        {
            Vector3 dp   = m * new Vector3(features [i].X, features [i].Y, 0);
            float   dist = (dp.x - _prevPoints [i].X) * (dp.x - _prevPoints [i].X) +
                           (dp.y - _prevPoints [i].Y) * (dp.y - _prevPoints [i].Y);
            if (dist > Params.OFParameters.MinDistance * Params.OFParameters.MinDistance &&
                dist < Params.OFParameters.MaxDistance * Params.OFParameters.MaxDistance)
            {
                //check if the calculated point belongs to the object motion or to camera motion
                //Vector3 d = new Vector3 (features [i].X - dp.x, features [i].Y - dp.y,0);

                /*	float len= Mathf.Sqrt(dist);//dp.magnitude;
                 *      if (len < Params.OFParameters.FeatureSimilarityThreshold) {
                 *              continue;//skip this point, correlated with camera motion
                 *      }*/
                /*
                 * Vector3 d = new Vector3 (features [i].X - _currPoints [i].X, features [i].Y - _currPoints [i].Y,0);
                 * d.Normalize ();
                 * float dp = Vector2.Dot (d, direction);
                 * if (dp > Params.OFParameters.FeatureSimilarityThreshold) {
                 *      continue;//skip this point, correlated with camera motion
                 * }*/
                // add this point
                ++count;
                float x = features [i].X / (float)W;
                float y = (features [i].Y / (float)H);
                if (x > 1 || x < 0 || y > 1 || y < 0)
                {
                    continue;
                }
                float w = 20 / W;              // Mathf.Abs(_currPoints [i].X - features [i].X)/W;
                float h = 20 / H;              //Mathf.Abs(_currPoints [i].Y - features [i].Y)/H;
                Rect  r = new Rect(x - w / 2.0f, y - h / 2.0f /*1-y-h*/, w, h);
                //target.SetWeight (x,1-y,1.0f);
                target.FillRectangle(r.x, r.y, r.width, r.height, 1);

                TrackedFeature f = new TrackedFeature();
                f.v1 = new Vector2(_currPoints[i].X / W, _currPoints[i].Y / H);
                f.v2 = new Vector2(features [i].X / W, features [i].Y / H);
                _opticalFlow.Add(f);
            }
        }

        if (count > features.Length / 10)
        {
            _featuresDetected = true;
        }
        else
        {
            _featuresDetected = false;
        }


        if (features != null)
        {
            lock (_objectsLock) {
                _prevPoints = _currPoints;
                _currPoints = features;
            }
        }

        _prevImage = image.Clone();
    }
Example #13
        private void goruntual(object sender, EventArgs e)
        {
            try { capt.Retrieve(kare); }
            catch (Exception exc)
            {
                console1.Invoke((Action) delegate { console1.AppendText("CaptureError:" + exc.Message + "\n"); });
            }



            karei = kare.ToImage <Bgr, Byte>();
            // karei.Flip(FlipType.Horizontal);
            //karei.Rotate(60,new PointF(320,24),Inter.Area,new Bgr(0,0,0),true);



            karei._GammaCorrect(1);



            kareg = karei.Convert <Gray, Byte>();                              //convert to grayscale

            veriler = sinif.DetectMultiScale(kareg, 1.1, 4, new Size(80, 80)); //detection

            if (veriler.Length != 0)
            {
                for (int i = 1; i < veriler.Length; i++)
                {
                    if ((veriler[i].Width * veriler[i].Height) > (veriler[0].Width * veriler[0].Height))
                    {
                        veriler[0] = veriler[i];
                    }
                }
            }

            // move the largest detection to index 0



            try
            {
                takip.step(veriler);

                processedveri = takip.al();
                #region face and eye processing

                if (processedveri.Length > 0 && takip.bulundu)
                {
                    karei.ROI = processedveri[0];
                    faceim    = karei.Copy();
                    faceg     = faceim.Convert <Gray, Byte>();
                    Rectangle[] gozrect = gozler.DetectMultiScale(faceg, 1.1, 4, new Size(20, 20));    //detection


                    if (gozrect.Length > 0)
                    {
                        for (int i = 0; i < gozrect.Length; i++)
                        {
                            if ((gozrect[i].Width * gozrect[i].Height) > (gozrect[0].Width * gozrect[0].Height))
                            {
                                gozrect[0] = gozrect[i];
                            }
                        }    //take the largest
                        gozrect[0].Y      -= 30;
                        gozrect[0].Height += 40;
                        gozrect[0].X      -= 5;
                        gozrect[0].Width  += 10;
                    }

                    if (gozrect.Length > 0)
                    {
                        filtre.besle(gozrect[0], fps);
                        Rectangle EyeRect = filtre.al(faceim.Width, faceim.Height);
                        faceim.ROI = EyeRect;
                        Image <Bgr, Byte> Gozim = faceim.Copy();

                        Image <Hsv, Byte> GozimHSV = Gozim.Convert <Hsv, Byte>();

                        Image <Gray, Byte> RangeMask = GozimHSV.InRange(new Hsv(13, 10, 58), new Hsv(19, 40, 180));

                        faceim.ROI = new Rectangle();

                        eyebox.Image = Gozim;
                        faceim.Draw(EyeRect, new Bgr(0, 0, 255), 2);
                    }


                    facebox.Image = faceim;
                    karei.ROI     = new Rectangle();
                    faceg         = null;
                }
            }
            catch (Exception exc)
            {
                console1.Invoke((Action) delegate { console1.AppendText("Gozrect:" + exc.Message + "\n"); });
            }
            #endregion

            try
            {
                if (takip.bulundu)
                {
                    if (program == mod.Tanıma)
                    {
                        karefactor(ref processedveri[0], 0.5f);
                    }



                    kareg.ROI = processedveri[0];


                    faceg     = kareg.Copy();
                    kareg.ROI = new Rectangle();


                    if (program == mod.Tanıma && !modtimer.IsRunning)
                    {
                        modtimer.Start();
                    }

                    foreach (Rectangle veri in processedveri)
                    {
                        karei.Draw(veri, new Bgr(255, 0, 0));    // draw the rectangles coming from the Kalman filter on screen
                    }
                    if (program == mod.Tanıma)
                    {
                        #region detection
                        stopFareForm();

                        MKeyPoint[] noktalar = gftt.Detect(faceg);



                        noktalarf[0] = new PointF[noktalar.Length];

                        for (int i = 0; i < noktalar.Length; i++)
                        {
                            noktalarf[0][i].X = noktalar[i].Point.X;
                            noktalarf[0][i].Y = noktalar[i].Point.Y;
                        }

                        faceg.FindCornerSubPix(noktalarf, new Size(5, 5), new Size(-1, -1), new MCvTermCriteria(60, .05d));

                        for (int i = 0; i < noktalarf[0].Length; i++)    //shift to full-frame coordinates
                        {
                            noktalarf[0][i].X += processedveri[0].X;
                            noktalarf[0][i].Y += processedveri[0].Y;
                        }
                        // }
                        ///Experiment start
                        ///

                        /*noktalarf[0] = new PointF[]{
                         *   new PointF(processedveri[0].X+processedveri[0].Width/2,processedveri[0].Top),
                         *   new PointF(processedveri[0].Left,processedveri[0].Y+processedveri[0].Height/2),
                         *   new PointF(processedveri[0].Right,processedveri[0].Y+processedveri[0].Height/2),
                         *   new PointF(processedveri[0].X+processedveri[0].Width/2,processedveri[0].Bottom),
                         * };*/

                        ///Experiment end
                        ///

                        if (takip.yuzhizi >= 32.0)
                        {
                            modtimer.Reset();
                        }

                        if (modtimer.ElapsedMilliseconds >= 2200)
                        {
                            program = mod.Takip;
                            eskiftr = noktalarf[0];
                            renkler = new List <Bgr>(new Bgr[noktalarf[0].Length]);
                            for (int i = 0; i < renkler.Count; i++)
                            {
                                renkler[i] = new Bgr(255, 255, 255);
                            }

                            //Pozlama is being initialized

                            pozlama = new Pozlama(noktalarf[0], processedveri[0], new Size(640, 480));
                        }
                        Bgr noktarengi = new Bgr(255, 0, 0);

                        foreach (PointF aa in noktalarf[0])
                        {
                            karei.Draw(new CircleF(new PointF(aa.X, aa.Y), 2), noktarengi, 1);
                        }


                        #endregion
                    }
                }

                else
                {
                    modtimer.Stop();
                    modtimer.Reset();
                }
            }
            catch (Exception exc)
            {
                console1.Invoke((Action) delegate { console1.AppendText("" + exc.Message + "\n"); });
            }

            if (program == mod.Takip)
            {
                //If the fare (mouse) form has not been created yet, it is opened
                startFareForm();
                float[]  trackError;
                byte[]   status;
                PointF[] noktayeni = new PointF[eskiftr.Length];



                #region optical flow algorithm
                modtimer.Stop();
                modtimer.Reset();
                Image <Gray, Byte>[] pyramid = eskikare.BuildPyramid(0);
                //pyramid built

                CvInvoke.CalcOpticalFlowPyrLK(pyramid[0], kareg, eskiftr, new Size(60, 60), 15, new MCvTermCriteria(150, 1d), out noktayeni, out status, out trackError);



                #endregion


                List <PointF> liste = new List <PointF>(noktayeni);
                if (takip.bulundu)
                {
                    for (int j = 0; j < liste.Count; j++)
                    {
                        if (!(

                                liste[j].X >= processedveri[0].Left
                                &&
                                liste[j].Y >= processedveri[0].Top
                                &&
                                liste[j].X <= processedveri[0].Right
                                &&
                                liste[j].Y <= processedveri[0].Bottom


                                ))
                        {
                            int azalim = 28;

                            var renk = renkler[j];
                            renk.Blue  -= azalim;
                            renk.Green -= azalim;

                            renkler[j] = renk;
                        }
                        else
                        {
                            renkler[j] = new Bgr(255, 255, 255);
                        }
                        if (renkler[j].Blue <= 30)
                        {
                            renkler.RemoveAt(j);
                            liste.RemoveAt(j);
                            pozlama.NoktaSil(j);
                            program = mod.Tanıma;
                        }
                    }
                }
                else
                {
                    for (int i = 0; i < renkler.Count; i++)
                    {
                        renkler[i] = new Bgr(255, 255, 255);
                    }
                }


                noktayeni = liste.ToArray();


                //print the yaw/pitch/roll (ypr) values
                int index = 0;
                foreach (PointF aa in noktayeni)
                {
                    karei.Draw(new CircleF(new PointF(aa.X, aa.Y), 2), renkler[index], 3);
                    index++;
                }



                eskiftr = noktayeni;
                float[] rotasyon = pozlama.rotasyon(eskiftr, processedveri[0]);
                imlec.feed(rotasyon[0], rotasyon[1], (float)fps);
                this.Invoke((Action) delegate { fareform.mouseVelocity = imlec.Hiz; });
                console1.Invoke((Action) delegate { console1.AppendText("" + fareform.tickIndex + "\n"); });

                /*yprgosterge.Invoke((Action)delegate
                 * {
                 *  yprgosterge.Text = "Yaw:" + Math.Round(rotasyon[0], 2, MidpointRounding.AwayFromZero) + "Pitch:" + Math.Round(rotasyon[1], 2, MidpointRounding.AwayFromZero) + "Roll:" + Math.Round(rotasyon[2], 2, MidpointRounding.ToEven);
                 * });*/
            }

            eskikare      = kareg;
            karei.ROI     = new Rectangle();
            goruntu.Image = karei;
            if (!fpstimer.IsRunning)
            {
                fpstimer.Start();
            }
            else
            {
                fps = 1000.0 / fpstimer.ElapsedMilliseconds;
                fps = Math.Round(fps, 3);
                fpstimer.Reset();
                fpstimer.Start();
                karei.Draw("FPS:" + fps, new System.Drawing.Point(30, 48), FontFace.HersheyComplex, 0.5, new Bgr(0, 0, 0), 1);
            }



            GC.Collect();



            /* catch (Exception exc)
             * {
             *
             *
             *   console1.Invoke((Action)delegate { console1.AppendText(exc.Message + "\n"); });
             *
             *
             * }*/
        }
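Example #14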
        /// <summary>
        /// Takes the video and processes it two frames at a time to calculate
        /// optical flow features and saves them to disk.
        /// </summary>
        /// <param name="vid">Path of the video on the disk.</param>
        /// <param name="save_path">Path to save the features on the disk.</param>
        /// <returns></returns>
        public void Extract_Featurers2(String vid, String save_path)
        {
            int mm = 0;

            try
            {
                mag          = new Mat();
                ang          = new Mat();
                frame        = new Mat();
                prev_frame   = new Mat();
                cap          = new VideoCapture(vid);
                total_frames = Convert.ToInt32(cap.GetCaptureProperty(CapProp.FrameCount));
                F_L          = new List <int>();


                frame      = cap.QueryFrame();
                prev_frame = frame;

                Console.WriteLine(total_frames);
            }
            catch (NullReferenceException except)
            {
                Console.WriteLine(except.Message);
            }
            //17900
            while (mm < total_frames - 2)
            {
                try
                {
                    prev_frame = frame;
                    frame      = cap.QueryFrame();

                    Bitmap image = new Bitmap(frame.Bitmap);

                    // Create a new FAST Corners Detector
                    FastCornersDetector fast = new FastCornersDetector()
                    {
                        Suppress  = true, // suppress non-maximum points
                        Threshold = 70    // less leads to more corners
                    };

                    // Process the image looking for corners
                    List <IntPoint> points = fast.ProcessImage(image);

                    // Create a filter to mark the corners
                    PointsMarker marker = new PointsMarker(points);

                    // Apply the corner-marking filter
                    Bitmap markers = marker.Apply(image);

                    // Show on the screen
                    //Accord.Controls.ImageBox.Show(markers);

                    // Use the FAST detector to extract interest points from the current frame:
                    List <IntPoint> descriptors = fast.ProcessImage(image);
                    PointF[]        features    = new PointF[descriptors.Count];

                    int c = 0;
                    foreach (IntPoint p in descriptors)
                    {
                        features[c] = new PointF(p.X, p.Y);
                        c++;
                    }

                    ImageViewer viewer = new ImageViewer();

                    Image <Gray, Byte> prev_grey_img = new Image <Gray, byte>(frame.Width, frame.Height);
                    Image <Gray, Byte> curr_grey_img = new Image <Gray, byte>(frame.Width, frame.Height);
                    curr_grey_img = frame.ToImage <Gray, byte>();
                    prev_grey_img = prev_frame.ToImage <Gray, Byte>();

                    PointF[] shiftedFeatures;
                    Byte[]   status;
                    float[]  trackErrors;

                    CvInvoke.CalcOpticalFlowPyrLK(prev_grey_img, curr_grey_img, features, new Size(9, 9), 3, new MCvTermCriteria(20, 0.05),
                                                  out shiftedFeatures, out status, out trackErrors);



                    //Image<Gray, Byte> displayImage = cap.QueryFrame().ToImage<Gray, Byte>();
                    //for (int i = 0; i < features.Length; i++)
                    //    displayImage.Draw(new LineSegment2DF(features[i], shiftedFeatures[i]), new Gray(), 2);


                    for (int i = 0; i < features.Length; i++)
                    {
                        CvInvoke.Circle(frame, System.Drawing.Point.Round(shiftedFeatures[i]), 4, new MCvScalar(0, 255, 255), 2);
                    }

                    int mean_X = 0;
                    int mean_Y = 0;

                    foreach (PointF p in shiftedFeatures)
                    {
                        mean_X += (int)p.X;
                        mean_Y += (int)p.Y;
                    }

                    mean_X /= shiftedFeatures.Length;
                    mean_Y /= shiftedFeatures.Length;

                    F_L.Add(mean_X);
                    F_L.Add(mean_Y);


                    //double[] inner = new double[] { mean_X, mean_Y };
                    //featuers_list[mm] = inner;

                    //viewer.Image = frame;
                    //viewer.ShowDialog();
                    //prev_frame = frame;

                    //Console.WriteLine("frame:{0} " + mm);
                    Console.WriteLine("frame:{0} " + mm + "  X:{1} " + mean_X + "   Y:{2} " + mean_Y);

                    mm++;
                }
                catch (Exception e)
                { Console.WriteLine(e.Message); }
            }
            //int go = 0;
            //foreach (double[] arr in featuers_list)
            //{
            //    Console.Write("frame:{0} ", go++);
            //    foreach (double d in arr)
            //        Console.Write(d + "    ");

            //    Console.WriteLine();
            //}
            Serialize.SerializeObject(F_L, save_path);
        }
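A hypothetical call site for the method above; "extractor" stands for an instance of the containing class and the file paths are placeholders:

            // Extract per-frame mean optical-flow positions from a video and serialize them to disk.
            extractor.Extract_Featurers2(@"C:\data\video.avi", @"C:\data\features.bin");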
Example #15
        public bool Track(Mat img)
        {
            //Track detected features
            if (_prevGray.IsEmpty)
            {
                Trace.WriteLine("Can't track: empty prev frame."); return(false);
            }

            var corners = new VectorOfPointF();
            var status  = new VectorOfByte();
            var errors  = new VectorOfFloat();

            CvInvoke.CvtColor(img, _currGray, ColorConversion.Bgr2Gray);

            CvInvoke.CalcOpticalFlowPyrLK(_prevGray, _currGray, Utils.GetPointsVector(_trackedFeatures), corners, status, errors, new Size(11, 11), 0, new MCvTermCriteria(100));
            _currGray.CopyTo(_prevGray);

            if (CvInvoke.CountNonZero(status) < status.Size * 0.8)
            {
                Trace.WriteLine("Tracking failed.");
                _bootstrapping = false;
                _canCalcMvm    = false;
                return(false);
            }

            _trackedFeatures = Utils.GetKeyPointsVector(corners);

            Utils.KeepVectorsByStatus(ref _trackedFeatures, ref _trackedFeatures3D, status);

            Console.WriteLine("tracking.");

            _canCalcMvm = (_trackedFeatures.Size >= MinInliers);

            if (_canCalcMvm)
            {
                //Perform camera pose estimation for AR
                var rvec = new Mat();
                var tvec = new Mat();

                CvInvoke.SolvePnP(_trackedFeatures3D, Utils.GetPointsVector(_trackedFeatures), _calibrationInfo.Intrinsic, _calibrationInfo.Distortion, _raux, _taux, !_raux.IsEmpty);

                _raux.ConvertTo(rvec, DepthType.Cv32F);
                _taux.ConvertTo(tvec, DepthType.Cv64F);

                var pts = new MCvPoint3D32f[] {
                    new MCvPoint3D32f(0.01f, 0, 0),
                    new MCvPoint3D32f(0, 0.01f, 0),
                    new MCvPoint3D32f(0, 0, 0.01f)
                };
                var axis = new VectorOfPoint3D32F(pts);

                var imgPoints = new VectorOfPointF();
                CvInvoke.ProjectPoints(axis, _raux, _taux, _calibrationInfo.Intrinsic, _calibrationInfo.Distortion, imgPoints);

                var centerPoint = new Point((int)_trackedFeatures[0].Point.X, (int)_trackedFeatures[0].Point.Y);

                var xPoint = new Point((int)imgPoints[0].X, (int)imgPoints[0].Y);
                var yPoint = new Point((int)imgPoints[1].X, (int)imgPoints[1].Y);
                var zPoint = new Point((int)imgPoints[2].X, (int)imgPoints[2].Y);

                CvInvoke.Line(img, centerPoint, xPoint, new MCvScalar(255, 0, 0), 5); //blue x-ax
                CvInvoke.Line(img, centerPoint, yPoint, new MCvScalar(0, 255, 0), 5); //green y-ax
                CvInvoke.Line(img, centerPoint, zPoint, new MCvScalar(0, 0, 255), 5); //red z-ax

                var rot = new Mat(3, 3, DepthType.Cv32F, 3);

                CvInvoke.Rodrigues(rvec, rot);
            }

            return(true);
        }
Example #16
        void ProcessFrame(object sender, EventArgs e)
        {
            Mat frame         = _cameraCapture.QueryFrame();
            Mat smoothedFrame = new Mat();

            CvInvoke.GaussianBlur(frame, smoothedFrame, new Size(3, 3), 1);    //Gaussian blur
            CvInvoke.CvtColor(frame, curgray, ColorConversion.Bgr2Gray);       //convert to grayscale
            goodFeaturesToTrack = new GFTTDetector(maxCount, qLevel, minDist); //initialize the keypoint detector
            frame.CopyTo(KeyPointPic);

            MKeyPoint[] keypoint = goodFeaturesToTrack.Detect(curgray);//keypoint detection
            for (int i = 0; i < keypoint.Count(); i++)
            {
                System.Drawing.Point point = System.Drawing.Point.Truncate(keypoint[i].Point);//get the keypoint coordinates as a Point
                CvInvoke.Circle(KeyPointPic, point, 3, new MCvScalar(0, 0, 255), 1);
            }

            if (prevFeature.Count() < 10)                                    //too few feature points left, re-detect
            {
                MKeyPoint[] keypoints = goodFeaturesToTrack.Detect(curgray); //keypoint detection
                AddNewPoint = keypoints.Count();
                Array.Resize(ref prevFeature, keypoints.Count());
                Array.Resize(ref initial, keypoints.Count());
                for (int i = 0; i < keypoints.Count(); i++)
                {
                    System.Drawing.Point point = System.Drawing.Point.Truncate(keypoints[i].Point);//get the keypoint coordinates as a Point
                    prevFeature[i] = point;
                    initial[i]     = point;
                    CvInvoke.Circle(curgray, point, 3, new MCvScalar(0, 0, 255), 1);
                }
            }
            if (pregray.Size.IsEmpty)
            {
                curgray.CopyTo(pregray);                      //first frame
            }
            MCvTermCriteria termcrit = new MCvTermCriteria(6);

            CvInvoke.CalcOpticalFlowPyrLK(pregray, curgray, prevFeature, curgray.Size, 2, termcrit, out currFeature, out status, out err, 0, 0.0001);
            AddNewPoint = prevFeature.Count();
            // discard poorly tracked feature points
            int k = 0;

            for (int i = 0; i < currFeature.Count(); i++)
            {
                try
                {
                    if (acceptTrackedPoint(i))
                    {
                        initial[k]       = initial[i];
                        currFeature[k++] = currFeature[i];
                    }
                }
                catch { }
            }

            Array.Resize(ref currFeature, k);
            Array.Resize(ref initial, k);

            frame.CopyTo(Flow);
            for (int i = 0; i < currFeature.Count(); i++)
            {
                //CvInvoke.Circle(Flow, Point.Truncate(currFeature[i]), 3, new MCvScalar(0, 0, 255),1);
                CvInvoke.Line(Flow, Point.Truncate(initial[i]), Point.Truncate(currFeature[i]), new Bgr(Color.DarkOrange).MCvScalar, 2);
            }



            imageBox1.Image = frame;
            imageBox2.Image = KeyPointPic;
            imageBox3.Image = Flow;

            curgray.CopyTo(pregray);
            Array.Resize(ref prevFeature, currFeature.Count());
            for (int i = 0; i < currFeature.Count(); i++)
            {
                prevFeature[i] = currFeature[i];
            }
            //Thread t = new Thread(() =>
            //{
            //    this.mainPages.Invoke(new Action(delegate ()
            //    {


            //    }));
            //});
            //t.Start();
        }
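
The acceptTrackedPoint helper called in the loop above is not part of this listing. A minimal sketch, assuming it keeps only points that CalcOpticalFlowPyrLK flagged as found (status == 1) and that moved by at least a couple of pixels between frames; both the criterion and the threshold are assumptions, not the original author's code:

        // Hypothetical helper: accept a tracked point only if LK found it (status == 1)
        // and it moved a little since the previous frame. prevFeature, currFeature and
        // status are the same fields used by ProcessFrame above.
        bool acceptTrackedPoint(int i)
        {
            double dx = Math.Abs(currFeature[i].X - prevFeature[i].X);
            double dy = Math.Abs(currFeature[i].Y - prevFeature[i].Y);
            return status[i] == 1 && (dx + dy) > 2;
        }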
Ejemplo n.º 17
0
        public bool BootstrapTrack(Mat img)
        {
            #region Trace

            Trace.WriteLine($"BootstrapTrack iteration ({ _trackedFeatures.Size}).");
            Trace.WriteLine("--------------------------");
            Trace.Indent();

            #endregion

            //Track detected features
            if (_prevGray.IsEmpty)
            {
                const string error = "Previous frame is empty. Bootstrap first.";
                Trace.TraceError(error);
                throw new Exception(error);
            }

            if (img.IsEmpty || img.NumberOfChannels != 3)
            {
                const string error = "Image is not appropriate (Empty or wrong number of channels).";
                Trace.TraceError(error);
                throw new Exception(error);
            }

            var corners = new VectorOfPointF();
            var status  = new VectorOfByte();
            var errors  = new VectorOfFloat();

            var currGray = new Mat();
            CvInvoke.CvtColor(img, currGray, ColorConversion.Bgr2Gray);

            CvInvoke.CalcOpticalFlowPyrLK(_prevGray, currGray, Utils.GetPointsVector(_trackedFeatures), corners,
                                          status, errors, new Size(11, 11), 3, new MCvTermCriteria(20, 0.03));
            currGray.CopyTo(_prevGray);

            #region Trace

            Trace.WriteLine($"Tracked first point: ({_trackedFeatures[0].Point.X}, {_trackedFeatures[0].Point.Y}) / Found first corner = ({corners[0].X}, {corners[0].Y})");
            Trace.WriteLine($"Tracked second point: ({_trackedFeatures[1].Point.X}, {_trackedFeatures[1].Point.Y}) / Found second corner = ({corners[1].X}, {corners[1].Y})");
            Trace.WriteLine($"Tracked third point: ({_trackedFeatures[2].Point.X}, {_trackedFeatures[2].Point.Y}) / Found third corner = ({corners[2].X}, {corners[2].Y})");

            #endregion

            for (int j = 0; j < corners.Size; j++)
            {
                if (status[j] == 1)
                {
                    var p1 = new Point((int)_trackedFeatures[j].Point.X, (int)_trackedFeatures[j].Point.Y);
                    var p2 = new Point((int)corners[j].X, (int)corners[j].Y);

                    CvInvoke.Line(img, p1, p2, new MCvScalar(120, 10, 20));
                }
            }

            if (CvInvoke.CountNonZero(status) < status.Size * 0.8)
            {
                Trace.TraceError("Tracking failed.");
                throw new Exception("Tracking failed.");
            }

            _trackedFeatures = Utils.GetKeyPointsVector(corners);

            Utils.KeepVectorsByStatus(ref _trackedFeatures, ref _bootstrapKp, status);

            Trace.WriteLine($"{_trackedFeatures.Size} features survived optical flow.");

            if (_trackedFeatures.Size != _bootstrapKp.Size)
            {
                const string error = "Tracked features vector size is not equal to bootstrapped one.";
                Trace.TraceError(error);
                throw new Exception(error);
            }

            #region Trace

            Trace.WriteLine($"Bootstrap first point: ({_bootstrapKp[0].Point.X}, {_bootstrapKp[0].Point.Y}) / Found first corner = ({corners[0].X}, {corners[0].Y})");
            Trace.WriteLine($"Bootstrap second point: ({_bootstrapKp[1].Point.X}, {_bootstrapKp[1].Point.Y}) / Found second corner = ({corners[1].X}, {corners[1].Y})");
            Trace.WriteLine($"Bootstrap third point: ({_bootstrapKp[2].Point.X}, {_bootstrapKp[2].Point.Y}) / Found third corner = ({corners[2].X}, {corners[2].Y})");

            #endregion

            //verify features with a homography
            var inlierMask = new VectorOfByte();
            var homography = new Mat();
            if (_trackedFeatures.Size > 4)
            {
                CvInvoke.FindHomography(Utils.GetPointsVector(_trackedFeatures), Utils.GetPointsVector(_bootstrapKp), homography, HomographyMethod.Ransac, RansacThreshold, inlierMask);
            }

            int inliersNum = CvInvoke.CountNonZero(inlierMask);

            var m = new Matrix <double>(homography.Rows, homography.Cols, homography.DataPointer);

            m.Dispose();

            Trace.WriteLine($"{inliersNum} features survived homography.");

            if (inliersNum != _trackedFeatures.Size && inliersNum >= 4 && !homography.IsEmpty)
            {
                Utils.KeepVectorsByStatus(ref _trackedFeatures, ref _bootstrapKp, inlierMask);
            }
            else if (inliersNum < MinInliers)
            {
                Trace.TraceError("Not enough features survived homography.");
                return(false);
            }

            var bootstrapKpOrig     = new VectorOfKeyPoint(_bootstrapKp.ToArray());
            var trackedFeaturesOrig = new VectorOfKeyPoint(_trackedFeatures.ToArray());

            //Attempt at 3D reconstruction (triangulation) if conditions are right
            var rigidT = CvInvoke.EstimateRigidTransform(Utils.GetPointsVector(_trackedFeatures).ToArray(), Utils.GetPointsVector(_bootstrapKp).ToArray(), false);
            var matrix = new Matrix <double>(rigidT.Rows, rigidT.Cols, rigidT.DataPointer);

            #region Trace

            Trace.WriteLine($"Track first point: ({_trackedFeatures[0].Point.X}, {_trackedFeatures[0].Point.Y}) / Bootstrap first point = ({_bootstrapKp[0].Point.X}, {_bootstrapKp[0].Point.Y})");
            Trace.WriteLine($"Track 10th point: ({_trackedFeatures[10].Point.X}, {_trackedFeatures[10].Point.Y}) / Bootstrap 10th point = ({_bootstrapKp[10].Point.X}, {_bootstrapKp[10].Point.Y})");
            Trace.WriteLine($"Track last point: ({_trackedFeatures[_trackedFeatures.Size - 1].Point.X}, {_trackedFeatures[_trackedFeatures.Size - 1].Point.Y}" +
                            $") / Bootstrap third point = ({_bootstrapKp[_bootstrapKp.Size - 1].Point.X}, {_bootstrapKp[_bootstrapKp.Size - 1].Point.Y})");

            Trace.WriteLine($"Rigid matrix: [ [ {matrix[0, 0]}, {matrix[0, 1]}, {matrix[0, 2]} ] [ {matrix[1, 0]}, {matrix[1, 1]}, {matrix[1, 2]} ] ].");
            Trace.WriteLine($"Rigid: {CvInvoke.Norm(matrix.GetCol(2))}");

            #endregion

            if (CvInvoke.Norm(matrix.GetCol(2)) > 100)
            {
                //camera motion is sufficient
                var p1 = new Matrix <double>(3, 4);
                p1.SetIdentity();
                var result = OpenCvUtilities.CameraPoseAndTriangulationFromFundamental(_calibrationInfo, _trackedFeatures, _bootstrapKp, p1);

                _trackedFeatures = result.FilteredTrackedFeaturesKp;
                _bootstrapKp     = result.FilteredBootstrapKp;

                if (result.Result)
                {
                    _trackedFeatures3D = result.TrackedFeatures3D;
                    var trackedFeatures3Dm = Utils.Get3dPointsMat(_trackedFeatures3D);

                    var eigenvectors = new Mat();
                    var mean         = new Mat();
                    CvInvoke.PCACompute(trackedFeatures3Dm, mean, eigenvectors);
                    var eigenvectorsMatrix = new Matrix <double>(eigenvectors.Rows, eigenvectors.Cols, eigenvectors.DataPointer);

                    int numInliers    = 0;
                    var normalOfPlane = eigenvectorsMatrix.GetRow(2).ToUMat().ToMat(AccessType.Fast);
                    //eigenvectors.GetRow(2).CopyTo(normalOfPlane);
                    CvInvoke.Normalize(normalOfPlane, normalOfPlane);

                    var normalOfPlaneMatrix = new Matrix <double>(normalOfPlane.Rows, normalOfPlane.Cols, normalOfPlane.DataPointer);
                    Trace.WriteLine($"normal of plane: {normalOfPlaneMatrix[0, 0]}");
                    //cv::Vec3d x0 = pca.mean;
                    //double p_to_plane_thresh = sqrt(pca.eigenvalues.at<double>(2));
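                    // Hypothetical continuation, mirroring the commented C++ above (an assumption,
                    // not part of the original example): count the triangulated points lying close
                    // to the dominant plane, using the PCA mean as a point on the plane and the
                    // unit normal computed above. The threshold and variable names are illustrative.
                    //
                    // var meanMatrix = new Matrix<double>(mean.Rows, mean.Cols, mean.DataPointer);
                    // const double pToPlaneThreshold = 0.1; // would normally derive from the PCA eigenvalues
                    // foreach (var p in _trackedFeatures3D.ToArray())
                    // {
                    //     double d = Math.Abs(
                    //         normalOfPlaneMatrix[0, 0] * (p.X - meanMatrix[0, 0]) +
                    //         normalOfPlaneMatrix[0, 1] * (p.Y - meanMatrix[0, 1]) +
                    //         normalOfPlaneMatrix[0, 2] * (p.Z - meanMatrix[0, 2]));
                    //     if (d < pToPlaneThreshold)
                    //     {
                    //         numInliers++;
                    //     }
                    // }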
                }

                return(true);
            }

            #region Trace

            Trace.Unindent();
            Trace.WriteLine("--------------------------");

            #endregion

            return(false);
        }
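
Utils.KeepVectorsByStatus, used twice above, is not part of this listing either. A plausible sketch, assuming it simply drops from both keypoint vectors the entries whose status byte is zero so the two vectors stay paired (types from Emgu.CV.Structure, Emgu.CV.Util and System.Collections.Generic):

        // Hypothetical sketch of Utils.KeepVectorsByStatus: keep only the entries whose
        // status byte is non-zero, preserving the pairing between the two vectors.
        public static void KeepVectorsByStatus(ref VectorOfKeyPoint a, ref VectorOfKeyPoint b, VectorOfByte status)
        {
            var keptA = new List<MKeyPoint>();
            var keptB = new List<MKeyPoint>();
            for (int i = 0; i < status.Size; i++)
            {
                if (status[i] != 0)
                {
                    keptA.Add(a[i]);
                    keptB.Add(b[i]);
                }
            }
            a = new VectorOfKeyPoint(keptA.ToArray());
            b = new VectorOfKeyPoint(keptB.ToArray());
        }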
Ejemplo n.º 18
0
        public PointF[] CalculateOpticalFlow_Sparse(Mat newFrame, Rectangle roi, Rectangle[] rois, bool forceRecheck = false)
        {
            //Mat newFrame = currFrame.Clone();
            if (prePoints == null || forceRecheck)
            {
                #region Extract Points
                // if running for first frame
                GetCorners(newFrame, roi);

                try
                {
                    //preFrame = new Mat();
                    //newFrame.CopyTo(preFrame);
                    if (prePoints.Length != 0)
                    {
                        CvInvoke.CalcOpticalFlowPyrLK(newFrame, newFrame, prePoints, new Size(9, 9), 3, new MCvTermCriteria(20, 1), out currFeatures, out status, out trackError); //note: on this first pass the previous and current image are both newFrame
                    }
                }
                catch (Exception e)
                {
                    MessageBox.Show("In CalculateOpticalFlow_Sparse (true) : " + e.Message);
                }
                prePoints = currFeatures;

                preFrame = newFrame.Clone();

                return(null);

                #endregion
            }
            else
            {
                #region Find Points

                if (useOFCleaning)
                {
                    prePoints = CleanOFPoints(prePoints, rois);


                    if (needToRecalculateOF)
                    {
                        // this will recalculate the OF points
                        CalculateOpticalFlow_Sparse(newFrame, rectOfPerson, rois, true);

                        Debug.AddTrackText("Recalculating OF");
                    }
                }

                try
                {
                    CvInvoke.CalcOpticalFlowPyrLK(preFrame, newFrame, prePoints, new Size(9, 9), 3, new MCvTermCriteria(20, 1), out currFeatures, out status, out trackError);
                    prePoints = currFeatures;
                }
                catch (Exception e)
                {
                    MessageBox.Show("In CalculateOpticalFlow_Sparse (false) : " + e.Message);
                }


                #endregion
            }
            preFrame = newFrame.Clone();
            return(currFeatures);
            //preFrame = newFrame.Clone();
            //preFrame = new Mat();
            //newFrame.CopyTo(preFrame);
        }
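
GetCorners, which (re)seeds the prePoints field, is likewise not shown. A minimal sketch, assuming it detects Shi-Tomasi corners inside the given ROI and stores them back in full-frame coordinates; the detector parameters and the use of System.Linq are assumptions:

        // Hypothetical sketch of GetCorners: detect Shi-Tomasi corners inside the ROI and
        // store them in prePoints in full-frame coordinates. Parameter values are illustrative.
        private void GetCorners(Mat frame, Rectangle roi)
        {
            using (var gray = new Mat())
            using (var detector = new GFTTDetector(200, 0.01, 5))
            {
                CvInvoke.CvtColor(frame, gray, ColorConversion.Bgr2Gray);
                using (var roiGray = new Mat(gray, roi))
                {
                    prePoints = detector.Detect(roiGray)
                                        .Select(kp => new PointF(kp.Point.X + roi.X, kp.Point.Y + roi.Y))
                                        .ToArray();
                }
            }
        }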
Ejemplo n.º 19
0
        public int Track(Bitmap bm)
        {
            if (!Active)
            {
                return(0);
            }


            Rectangle  rect    = new Rectangle(0, 0, bm.Width, bm.Height);
            BitmapData bmpData = bm.LockBits(rect, ImageLockMode.ReadWrite, bm.PixelFormat);

            Mat m = new Mat(bm.Height, bm.Width, Emgu.CV.CvEnum.DepthType.Cv8U, 3, bmpData.Scan0, bmpData.Stride); //wraps the locked bitmap data; assumes a 24bpp BGR bitmap


            PointF[] nextPts;
            byte[]   status;
            float[]  err;
            CvInvoke.CalcOpticalFlowPyrLK(PrevImg, m, prevPoints, pyrWinSize, pyrLevel, new MCvTermCriteria(maxIter, eps), out nextPts, out status, out err);


            // foreach (var kp in keyPoints)
            //{
            //     CvInvoke.Circle(m, new Point((int)kp.Point.X+searchROI.Left, (int)kp.Point.Y+searchROI.Top), 5, new MCvScalar(100, 100, 255));
            //}

            PrevImg = m.Clone();

            CvInvoke.Rectangle(m, new Rectangle(searchLocation, searchSize), new MCvScalar(100, 100, 255, 233));

            prevPoints = prevPoints.Where((p, idx) => status[idx] == 1).ToArray();
            nextPts    = nextPts.Where((p, idx) => status[idx] == 1).ToArray();

            double meanX = nextPts.Sum(p => p.X) / nextPts.Count();
            double meanY = nextPts.Sum(p => p.Y) / nextPts.Count();

            for (int i = 0; i < nextPts.Count(); i++)
            //foreach (var np in nextPts)
            {
                PointF pp = prevPoints[i];
                PointF np = nextPts[i];
                if (olflags.HasFlag(OLFlags.Points))
                {
                    CvInvoke.Circle(m, new Point((int)np.X, (int)np.Y), 5, new MCvScalar(100, 100, 255));
                    CvInvoke.Line(m, new Point((int)pp.X, (int)pp.Y), new Point((int)np.X, (int)np.Y), new MCvScalar(100, 255, 100), 1);
                }
                double dist = Math.Sqrt(Math.Pow(np.X - pp.X, 2) + Math.Pow(np.Y - pp.Y, 2));

                if (olflags.HasFlag(OLFlags.Text))
                {
                    CvInvoke.PutText(m, i.ToString(), new Point((int)np.X, (int)np.Y), Emgu.CV.CvEnum.FontFace.HersheyComplexSmall, 1, new MCvScalar(100, 100, 255), 1);
                    string text = i.ToString() + ": " + dist.ToString("0.00");// + ", " + err[i].ToString("#.##");

                    CvInvoke.PutText(m, text, new Point(0, i * 15), Emgu.CV.CvEnum.FontFace.HersheyComplexSmall, 1, new MCvScalar(100, 100, 255));
                }
            }

            if (olflags.HasFlag(OLFlags.Center))
            {
                CvInvoke.Circle(m, new Point((int)meanX, (int)meanY), 7, new MCvScalar(255, 255, 100));
            }



            bm.UnlockBits(bmpData);

            prevPoints = nextPts;


            return(0);
        }
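
A possible way to drive Track from a capture loop; everything outside Track itself (VideoCapture, the Image<Bgr, byte>.ToBitmap conversion, pictureBox1, and the seeding of PrevImg/prevPoints from a first frame) is illustrative and not part of the original class:

        // Hypothetical usage sketch: feed camera frames to Track(). PrevImg and prevPoints are
        // assumed to have been seeded elsewhere (e.g. with a GFTTDetector on the first frame).
        // In a real application this loop would run on a timer or a background thread.
        void RunTracking(VideoCapture capture)
        {
            while (Active)
            {
                using (Mat frame = capture.QueryFrame())
                {
                    if (frame == null)
                    {
                        break;
                    }
                    using (Image<Bgr, byte> img = frame.ToImage<Bgr, byte>())
                    using (Bitmap bmp = img.ToBitmap())
                    {
                        Track(bmp);                              //draws tracked points/overlays into bmp
                        pictureBox1.Image = (Bitmap)bmp.Clone(); //pictureBox1 (WinForms PictureBox) is an assumption
                    }
                }
            }
        }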