Code Example #1
        Matrix<double> FindFundamentalMatrix(CameraModel cam1, CameraModel cam2)
        {
            // Normalize each camera's projected points with that camera's own intrinsic parameters.
            IntPtr points1Ptr = CvHelper.CreatePointListPointer(CvHelper.NormalizePoints(cam1.Calibration.ObjectPointsProjected, cam1.Calibration.IntrinsicParameters));
            IntPtr points2Ptr = CvHelper.CreatePointListPointer(CvHelper.NormalizePoints(cam2.Calibration.ObjectPointsProjected, cam2.Calibration.IntrinsicParameters));

            // cvFindFundamentalMat writes the 3x3 result directly into this matrix,
            // so no separate cvCreateMat allocation is needed.
            Matrix<double> fundamentalMatrix = new Matrix<double>(3, 3);

            CvInvoke.cvFindFundamentalMat(points1Ptr, points2Ptr, fundamentalMatrix.Ptr, CV_FM.CV_FM_RANSAC, 3, 0.99, IntPtr.Zero);

            return fundamentalMatrix;
        }
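The CvHelper.NormalizePoints helper used above is not shown in this example. As a rough sketch of what such a helper presumably does (an assumption, not the project's actual implementation), it maps pixel coordinates to normalized image coordinates by undoing the camera's intrinsic matrix, so that the matrix estimated from the normalized correspondences is effectively the essential matrix:

        // Sketch only. Assumes: using System.Drawing; using Emgu.CV;
        // and a pinhole model whose focal lengths (fx, fy) and principal point (cx, cy)
        // are read from Emgu CV's 3x3 IntrinsicMatrix. The real CvHelper may differ.
        static PointF[] NormalizePointsSketch(PointF[] pixelPoints, IntrinsicCameraParameters intrinsics)
        {
            double fx = intrinsics.IntrinsicMatrix[0, 0];
            double fy = intrinsics.IntrinsicMatrix[1, 1];
            double cx = intrinsics.IntrinsicMatrix[0, 2];
            double cy = intrinsics.IntrinsicMatrix[1, 2];

            PointF[] normalized = new PointF[pixelPoints.Length];
            for (int i = 0; i < pixelPoints.Length; i++)
            {
                // Shift by the principal point and divide by the focal length per axis.
                normalized[i] = new PointF(
                    (float)((pixelPoints[i].X - cx) / fx),
                    (float)((pixelPoints[i].Y - cy) / fy));
            }
            return normalized;
        }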
Code Example #2
        private void TrackerService_OnImageReady(object sender, EventArgs e)
        {
            OnImageReadyEventArgs args = (OnImageReadyEventArgs)e;

            IEnumerable<CameraModel> orderedCameras = _cameras.Cameras.OrderBy(camera => camera.Calibration.Index);

            foreach (CameraModel cameraModel1 in orderedCameras)
            {
                foreach (CameraModel cameraModel2 in orderedCameras)
                {
                    // Skip pairs where either camera has no projected points yet,
                    // self-pairs, and any first camera with index greater than 1.
                    if (cameraModel1.Calibration.ObjectPointsProjected == null ||
                        cameraModel2.Calibration.ObjectPointsProjected == null)
                    {
                        continue;
                    }
                    if (cameraModel1.Calibration.Index == cameraModel2.Calibration.Index)
                    {
                        continue;
                    }
                    if (cameraModel1.Calibration.Index > 1)
                    {
                        continue;
                    }

                    IntPtr points1Ptr = CvHelper.CreatePointListPointer(cameraModel1.Calibration.ObjectPointsProjected);
                    IntPtr points2Ptr = CvHelper.CreatePointListPointer(cameraModel2.Calibration.ObjectPointsProjected);

                    // cvFindFundamentalMat writes the 3x3 result directly into this matrix,
                    // so no separate cvCreateMat allocation is needed.
                    Matrix<double> fundamentalMatrix = new Matrix<double>(3, 3);
                    CvInvoke.cvFindFundamentalMat(points1Ptr, points2Ptr, fundamentalMatrix.Ptr, CV_FM.CV_FM_RANSAC, 3,
                                                  0.99, IntPtr.Zero);

                    // One epipolar line (a, b, c) per projected point, in each image.
                    int pointCount = cameraModel1.Calibration.ObjectPointsProjected.Length;
                    Matrix<double> lines1 = new Matrix<double>(pointCount, 3);
                    CvInvoke.cvComputeCorrespondEpilines(points2Ptr, 2, fundamentalMatrix.Ptr, lines1.Ptr);

                    Matrix<double> lines2 = new Matrix<double>(pointCount, 3);
                    CvInvoke.cvComputeCorrespondEpilines(points1Ptr, 1, fundamentalMatrix.Ptr, lines2.Ptr);

                    // Draw the epipolar lines computed from camera 1's projected points;
                    // each line a*x + b*y + c = 0 is evaluated at x = 0 and x = image width.
                    for (int i = 0; i < pointCount; i++)
                    {
                        {
                            Point[] points = new Point[2]
                            {
                                new Point(0, (int)-(lines2[i, 2] / lines2[i, 1])),
                                new Point(args.Image.Cols,
                                          (int)(-(lines2[i, 2] + lines2[i, 0] * args.Image.Cols) / lines2[i, 1]))
                            };
                            args.Image.DrawPolyline(points, false, new Bgr(255, 255, 0), 1);
                        }

                        //{
                        //    Point[] points = new Point[2]
                        //    {
                        //        new Point(0, (int) -(lines1[i, 2] / lines1[i, 1])),
                        //        new Point(args.Image.Cols, (int) (-(lines1[i, 2] + lines1[i, 0] * args.Image.Cols) / lines1[i, 1]))
                        //    };
                        //    args.Image.DrawPolyline(points, false, new Bgr(255, 0, 255), 1);
                        //}
                    }
                }
            }
        }
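Each row produced by cvComputeCorrespondEpilines holds the coefficients (a, b, c) of an epipolar line a*x + b*y + c = 0. The drawing loop above evaluates that line at x = 0 and x = args.Image.Cols to obtain two endpoints; the same computation could be factored into a small helper such as this sketch (not part of the original source):

        // Sketch only. Assumes: using System.Drawing;
        // Converts one epipolar line (a, b, c) of a*x + b*y + c = 0 into two drawable
        // endpoints at x = 0 and x = imageWidth. Assumes b != 0 (the line is not vertical).
        static Point[] EpilineToSegment(double a, double b, double c, int imageWidth)
        {
            return new Point[]
            {
                new Point(0, (int)(-c / b)),
                new Point(imageWidth, (int)(-(c + a * imageWidth) / b))
            };
        }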