// Exemplo n.º 1 (0)
 /// <summary>
 /// Native OpenCV entry point for camera calibration (cvCalibrateCamera2).
 /// Estimates the intrinsic matrix, distortion coefficients and the per-view
 /// rotation/translation vectors; returns the final reprojection error.
 /// All IntPtr arguments are pointers to unmanaged CvMat structures.
 /// </summary>
 /// <remarks>
 /// NOTE(review): a [DllImport] attribute is expected on this declaration but is
 /// not visible in this excerpt — confirm against the original CvInvoke source.
 /// NOTE(review): the managed CalibrateCamera wrapper passes a termCriteria
 /// argument after flags; this overload does not declare one — confirm which
 /// native signature (pre/post OpenCV 2.x term_crit addition) is being bound.
 /// </remarks>
 public static extern double cvCalibrateCamera2(
     IntPtr objectPoints,
     IntPtr imagePoints,
     IntPtr pointCounts,
     Size imageSize,
     IntPtr intrinsicMatrix,
     IntPtr distortionCoeffs,
     IntPtr rotationVectors,
     IntPtr translationVectors,
     CvEnum.CALIB_TYPE flags);
        /// <summary>
        /// Estimates intrinsic camera parameters and extrinsic parameters for each of the views
        /// </summary>
        /// <param name="objectPoints">The 3D location of the object points. The first index is the index of image, second index is the index of the point</param>
        /// <param name="imagePoints">The 2D image location of the points. The first index is the index of the image, second index is the index of the point</param>
        /// <param name="imageSize">The size of the image, used only to initialize intrinsic camera matrix</param>
        /// <param name="intrinsicParam">The intrinsic parameters, might contain some initial values. The values will be modified by this function.</param>
        /// <param name="calibrationType">Calibration type</param>
        /// <param name="termCriteria">The termination criteria</param>
        /// <param name="extrinsicParams">The output array of extrinsic parameters, one entry per view</param>
        /// <returns>The final reprojection error</returns>
        public static double CalibrateCamera(
            MCvPoint3D32f[][] objectPoints,
            PointF[][] imagePoints,
            Size imageSize,
            IntrinsicCameraParameters intrinsicParam,
            CvEnum.CALIB_TYPE calibrationType,
            MCvTermCriteria termCriteria,
            out ExtrinsicCameraParameters[] extrinsicParams)
        {
            Debug.Assert(objectPoints.Length == imagePoints.Length, "The number of images for objects points should be equal to the number of images for image points");
            int imageCount = objectPoints.Length;

            #region get the array that represent the point counts
            // pointCounts[i] = number of corresponding 3D/2D points in the i-th view.
            int[] pointCounts = new int[objectPoints.Length];
            for (int i = 0; i < objectPoints.Length; i++)
            {
                Debug.Assert(objectPoints[i].Length == imagePoints[i].Length, String.Format("Number of 3D points and image points should be equal in the {0}th image", i));
                pointCounts[i] = objectPoints[i].Length;
            }
            #endregion

            double reprojectionError = -1;
            using (Matrix<float> objectPointMatrix = ToMatrix(objectPoints))
            using (Matrix<float> imagePointMatrix = ToMatrix(imagePoints))
            using (Matrix<int> pointCountsMatrix = new Matrix<int>(pointCounts))
            using (Matrix<double> rotationVectors = new Matrix<double>(imageCount, 3))
            using (Matrix<double> translationVectors = new Matrix<double>(imageCount, 3))
            {
                reprojectionError = CvInvoke.cvCalibrateCamera2(
                    objectPointMatrix.Ptr,
                    imagePointMatrix.Ptr,
                    pointCountsMatrix.Ptr,
                    imageSize,
                    intrinsicParam.IntrinsicMatrix,
                    intrinsicParam.DistortionCoeffs,
                    rotationVectors,
                    translationVectors,
                    calibrationType,
                    termCriteria);

                // Copy the i-th row of the rotation/translation result matrices into
                // per-view ExtrinsicCameraParameters (transposed: row -> column vector).
                extrinsicParams = new ExtrinsicCameraParameters[imageCount];
                IntPtr matPtr = Marshal.AllocHGlobal(StructSize.MCvMat);
                try
                {
                    // try/finally guarantees the unmanaged CvMat header is released
                    // even if a CvInvoke call below throws (the original leaked here).
                    for (int i = 0; i < imageCount; i++)
                    {
                        ExtrinsicCameraParameters p = new ExtrinsicCameraParameters();
                        CvInvoke.cvGetRow(rotationVectors.Ptr, matPtr, i);
                        CvInvoke.cvTranspose(matPtr, p.RotationVector.Ptr);
                        CvInvoke.cvGetRow(translationVectors.Ptr, matPtr, i);
                        CvInvoke.cvTranspose(matPtr, p.TranslationVector.Ptr);
                        extrinsicParams[i] = p;
                    }
                }
                finally
                {
                    Marshal.FreeHGlobal(matPtr);
                }
            }
            return reprojectionError;
        }
// Exemplo n.º 3 (0)
        /// <summary>
        /// Estimates transformation between the 2 cameras making a stereo pair. If we have a stereo camera, where the relative position and orientatation of the 2 cameras is fixed, and if we computed poses of an object relative to the fist camera and to the second camera, (R1, T1) and (R2, T2), respectively (that can be done with cvFindExtrinsicCameraParams2), obviously, those poses will relate to each other, i.e. given (R1, T1) it should be possible to compute (R2, T2) - we only need to know the position and orientation of the 2nd camera relative to the 1st camera. That's what the described function does. It computes (R, T) such that:
        /// R2=R*R1,
        /// T2=R*T1 + T
        /// </summary>
        /// <param name="objectPoints">The 3D location of the object points. The first index is the index of image, second index is the index of the point</param>
        /// <param name="imagePoints1">The 2D image location of the points for camera 1. The first index is the index of the image, second index is the index of the point</param>
        /// <param name="imagePoints2">The 2D image location of the points for camera 2. The first index is the index of the image, second index is the index of the point</param>
        /// <param name="intrinsicParam1">The intrisinc parameters for camera 1, might contains some initial values. The values will be modified by this function.</param>
        /// <param name="intrinsicParam2">The intrisinc parameters for camera 2, might contains some initial values. The values will be modified by this function.</param>
        /// <param name="imageSize">Size of the image, used only to initialize intrinsic camera matrix</param>
        /// <param name="flags">Different flags</param>
        /// <param name="termCrit">Termination criteria for the iterative optimiziation algorithm</param>
        /// <param name="extrinsicParams">The extrinsic parameters which contains:
        /// R - The rotation matrix between the 1st and the 2nd cameras' coordinate systems;
        /// T - The translation vector between the cameras' coordinate systems. </param>
        /// <param name="foundamentalMatrix">The fundamental matrix</param>
        /// <param name="essentialMatrix">The essential matrix</param>
        public static void StereoCalibrate(
            MCvPoint3D32f[][] objectPoints,
            PointF[][] imagePoints1,
            PointF[][] imagePoints2,
            IntrinsicCameraParameters intrinsicParam1,
            IntrinsicCameraParameters intrinsicParam2,
            Size imageSize,
            CvEnum.CALIB_TYPE flags,
            MCvTermCriteria termCrit,
            out ExtrinsicCameraParameters extrinsicParams,
            out Matrix<double> foundamentalMatrix,
            out Matrix<double> essentialMatrix)
        {
            Debug.Assert(objectPoints.Length == imagePoints1.Length && objectPoints.Length == imagePoints2.Length, "The number of images for objects points should be equal to the number of images for image points");

            // One row per view: the count of corresponding points in that view,
            // stored as an Nx1 matrix as required by cvStereoCalibrate.
            int viewCount = objectPoints.Length;
            int[,] pointCounts = new int[viewCount, 1];
            for (int view = 0; view < viewCount; view++)
            {
                Debug.Assert(objectPoints[view].Length == imagePoints1[view].Length && objectPoints[view].Length == imagePoints2[view].Length, String.Format("Number of 3D points and image points should be equal in the {0}th image", view));
                pointCounts[view, 0] = objectPoints[view].Length;
            }

            // Allocate the output containers up front; the native call fills them in.
            extrinsicParams = new ExtrinsicCameraParameters();
            essentialMatrix = new Matrix<double>(3, 3);
            foundamentalMatrix = new Matrix<double>(3, 3);

            using (Matrix<float> objPts = ToMatrix(objectPoints))
            using (Matrix<float> imgPts1 = ToMatrix(imagePoints1))
            using (Matrix<float> imgPts2 = ToMatrix(imagePoints2))
            using (Matrix<int> counts = new Matrix<int>(pointCounts))
            {
                CvInvoke.cvStereoCalibrate(
                    objPts.Ptr,
                    imgPts1.Ptr,
                    imgPts2.Ptr,
                    counts.Ptr,
                    intrinsicParam1.IntrinsicMatrix,
                    intrinsicParam1.DistortionCoeffs,
                    intrinsicParam2.IntrinsicMatrix,
                    intrinsicParam2.DistortionCoeffs,
                    imageSize,
                    extrinsicParams.RotationVector,
                    extrinsicParams.TranslationVector,
                    essentialMatrix.Ptr,
                    foundamentalMatrix.Ptr,
                    termCrit,
                    flags);
            }
        }
// Exemplo n.º 4 (0)
 /// <summary>
 /// Native OpenCV entry point for stereo calibration (cvStereoCalibrate).
 /// Calibrates the stereo pair and computes the inter-camera rotation R,
 /// translation T, essential matrix E and fundamental matrix F; returns the
 /// final reprojection error. All IntPtr arguments are pointers to unmanaged
 /// CvMat structures.
 /// </summary>
 /// <remarks>
 /// NOTE(review): a [DllImport] attribute is expected on this declaration but is
 /// not visible in this excerpt — confirm against the original CvInvoke source.
 /// </remarks>
 public static extern double cvStereoCalibrate(
     IntPtr objectPoints,
     IntPtr imagePoints1,
     IntPtr imagePoints2,
     IntPtr pointCounts,
     IntPtr cameraMatrix1,
     IntPtr distCoeffs1,
     IntPtr cameraMatrix2,
     IntPtr distCoeffs2,
     Size imageSize,
     IntPtr R,
     IntPtr T,
     IntPtr E,
     IntPtr F,
     MCvTermCriteria termCrit,
     CvEnum.CALIB_TYPE flags);