public static Mat DrawBoard(CharucoBoard board, Size imageSize, int margin)
        {
            Image <Gray, byte> boardImage = new Image <Gray, byte>(imageSize);

            board.Draw(imageSize, boardImage, margin, 1);

            return(boardImage.Mat);
        }
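        // Hypothetical usage sketch: render the board with the helper above and save it to disk
        // via Emgu CV's CvInvoke.Imwrite. The image size, margin, and file path are placeholder
        // values, and SaveBoardImage itself is not part of the original sample.
        public static void SaveBoardImage(CharucoBoard board, string path)
        {
            Mat boardMat = DrawBoard(board, new Size(1080, 1920), 10);
            CvInvoke.Imwrite(path, boardMat);
        }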
        private double CalibrateCameraCharuco(List <List <Mat> > allCorners, List <Mat> allIds, CharucoBoard board, Size imageSize, Mat cameraMatrix, Mat distCoeffs, List <Mat> rvecs = null, List <Mat> tvecs = null, int calibrationFlags = 0, int minMarkers = 2)
        {
            // prepare data for charuco calibration
            int        nFrames           = allCorners.Count;
            List <Mat> allCharucoCorners = new List <Mat>();
            List <Mat> allCharucoIds     = new List <Mat>();
            List <Mat> filteredImages    = new List <Mat>();

            for (int i = 0; i < nFrames; ++i)
            {
                // interpolate using camera parameters
                Mat currentCharucoCorners = new Mat();
                Mat currentCharucoIds     = new Mat();

                Aruco.interpolateCornersCharuco(allCorners[i], allIds[i], allImgs[i], board, currentCharucoCorners, currentCharucoIds, cameraMatrix, distCoeffs, minMarkers);

                if (currentCharucoIds.total() > 0)
                {
                    allCharucoCorners.Add(currentCharucoCorners);
                    allCharucoIds.Add(currentCharucoIds);
                    filteredImages.Add(allImgs[i]);
                }
                else
                {
                    currentCharucoCorners.Dispose();
                    currentCharucoIds.Dispose();
                }
            }

            if (allCharucoCorners.Count < 1)
            {
                Debug.Log("Not enough corners for calibration.");
                return(-1);
            }

            if (rvecs == null)
            {
                rvecs = new List <Mat>();
            }
            if (tvecs == null)
            {
                tvecs = new List <Mat>();
            }

            return(Aruco.calibrateCameraCharuco(allCharucoCorners, allCharucoIds, board, imageSize, cameraMatrix, distCoeffs, rvecs, tvecs, calibrationFlags));
        }
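        // Hypothetical usage sketch: run the ChArUco calibration over the frames collected during
        // detection. Assumes allCorners, allIds, and allImgs have been filled, and that camMatrix,
        // distCoeffs, charucoBoard, and bgrMat are the fields initialized in InitializeCalibraton below.
        private void RunCharucoCalibration()
        {
            Size imageSize = new Size(bgrMat.width(), bgrMat.height());
            double repError = CalibrateCameraCharuco(allCorners, allIds, charucoBoard, imageSize, camMatrix, distCoeffs);
            Debug.Log("ChArUco calibration reprojection error: " + repError);
        }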
        private double CalibrateCameraAruco(List <List <Mat> > allCorners, List <Mat> allIds, CharucoBoard board, Size imageSize, Mat cameraMatrix, Mat distCoeffs, List <Mat> rvecs = null, List <Mat> tvecs = null, int calibrationFlags = 0)
        {
            // prepare data for calibration
            int nFrames = allCorners.Count;
            int allLen  = 0;

            int[] markerCounterPerFrameArr = new int[allCorners.Count];
            for (int i = 0; i < nFrames; ++i)
            {
                markerCounterPerFrameArr[i] = allCorners[i].Count;
                allLen += allCorners[i].Count;
            }

            int[] allIdsConcatenatedArr = new int[allLen];
            int   index = 0;

            for (int j = 0; j < allIds.Count; ++j)
            {
                int[] idsArr = new int[(int)allIds[j].total()];
                allIds[j].get(0, 0, idsArr);

                for (int k = 0; k < idsArr.Length; ++k)
                {
                    allIdsConcatenatedArr[index + k] = (int)idsArr[k];
                }
                index += idsArr.Length;
            }

            using (Mat allIdsConcatenated = new Mat(1, allLen, CvType.CV_32SC1))
                using (Mat markerCounterPerFrame = new Mat(1, nFrames, CvType.CV_32SC1))
                {
                    List <Mat> allCornersConcatenated = new List <Mat>();
                    foreach (var c in allCorners)
                    {
                        foreach (var m in c)
                        {
                            allCornersConcatenated.Add(m);
                        }
                    }

                    allIdsConcatenated.put(0, 0, allIdsConcatenatedArr);
                    markerCounterPerFrame.put(0, 0, markerCounterPerFrameArr);

                    if (rvecs == null)
                    {
                        rvecs = new List <Mat>();
                    }
                    if (tvecs == null)
                    {
                        tvecs = new List <Mat>();
                    }

                    return(Aruco.calibrateCameraAruco(allCornersConcatenated, allIdsConcatenated, markerCounterPerFrame, board, imageSize, cameraMatrix, distCoeffs, rvecs, tvecs, calibrationFlags));
                }
        }
        private void InitializeCalibraton(Mat frameMat)
        {
            Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

            float width  = frameMat.width();
            float height = frameMat.height();

            texture          = new Texture2D(frameMat.cols(), frameMat.rows(), TextureFormat.RGBA32, false);
            texture.wrapMode = TextureWrapMode.Clamp;

            previewQuad.GetComponent <MeshRenderer>().material.mainTexture = texture;
            previewQuad.transform.localScale = new Vector3(0.2f * width / height, 0.2f, 1);

            float imageSizeScale = 1.0f;

            // set camera parameters.
            camMatrix = CreateCameraMatrix(width, height);
            Debug.Log("camMatrix " + camMatrix.dump());

            distCoeffs = new MatOfDouble(0, 0, 0, 0, 0);
            Debug.Log("distCoeffs " + distCoeffs.dump());

            // compute and log camera matrix characteristics (FOV, focal length, principal point).
            Size   imageSize      = new Size(width * imageSizeScale, height * imageSizeScale);
            double apertureWidth  = 0;
            double apertureHeight = 0;

            double[] fovx           = new double[1];
            double[] fovy           = new double[1];
            double[] focalLength    = new double[1];
            Point    principalPoint = new Point(0, 0);

            double[] aspectratio = new double[1];

            Calib3d.calibrationMatrixValues(camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);

            Debug.Log("imageSize " + imageSize.ToString());
            Debug.Log("apertureWidth " + apertureWidth);
            Debug.Log("apertureHeight " + apertureHeight);
            Debug.Log("fovx " + fovx[0]);
            Debug.Log("fovy " + fovy[0]);
            Debug.Log("focalLength " + focalLength[0]);
            Debug.Log("principalPoint " + principalPoint.ToString());
            Debug.Log("aspectratio " + aspectratio[0]);


            bgrMat          = new Mat(frameMat.rows(), frameMat.cols(), CvType.CV_8UC3);
            rgbaMat         = new Mat(frameMat.rows(), frameMat.cols(), CvType.CV_8UC4);
            ids             = new Mat();
            corners         = new List <Mat>();
            rejectedCorners = new List <Mat>();
            rvecs           = new List <Mat>();
            tvecs           = new List <Mat>();

            detectorParams = DetectorParameters.create();
            detectorParams.set_cornerRefinementMethod(1); // CORNER_REFINE_SUBPIX: refine corners with OpenCV's cornerSubPix().
            dictionary = Aruco.getPredefinedDictionary((int)dictionaryId);

            recoveredIdxs = new Mat();

            charucoCorners = new Mat();
            charucoIds     = new Mat();
            charucoBoard   = CharucoBoard.create((int)squaresX, (int)squaresY, chArUcoBoradSquareLength, chArUcoBoradMarkerLength, dictionary);


            allCorners = new List <List <Mat> >();
            allIds     = new List <Mat>();
            allImgs    = new List <Mat>();

            imagePoints = new List <Mat>();

            isInitialized = true;
        }
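        // CreateCameraMatrix is called above but not defined in this excerpt. A minimal sketch,
        // consistent with the dummy intrinsics built in OnWebCamTextureToMatHelperInitialized below
        // (fx = fy = the larger image dimension, principal point at the image center):
        private Mat CreateCameraMatrix(float width, float height)
        {
            int    max_d = (int)Mathf.Max(width, height);
            double fx    = max_d;
            double fy    = max_d;
            double cx    = width / 2.0f;
            double cy    = height / 2.0f;

            Mat camMatrix = new Mat(3, 3, CvType.CV_64FC1);
            camMatrix.put(0, 0, fx, 0, cx, 0, fy, cy, 0, 0, 1.0);
            return(camMatrix);
        }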
        /// <summary>
        /// Raises the webcam texture to mat helper initialized event.
        /// </summary>
        public void OnWebCamTextureToMatHelperInitialized()
        {
            Debug.Log("OnWebCamTextureToMatHelperInitialized");

            Mat webCamTextureMat = webCamTextureToMatHelper.GetMat();

            texture = new Texture2D(webCamTextureMat.cols(), webCamTextureMat.rows(), TextureFormat.RGB24, false);

            gameObject.GetComponent <Renderer> ().material.mainTexture = texture;

            gameObject.transform.localScale = new Vector3(webCamTextureMat.cols(), webCamTextureMat.rows(), 1);
            Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

            if (fpsMonitor != null)
            {
                fpsMonitor.Add("width", webCamTextureMat.width().ToString());
                fpsMonitor.Add("height", webCamTextureMat.height().ToString());
                fpsMonitor.Add("orientation", Screen.orientation.ToString());
            }


            float width  = webCamTextureMat.width();
            float height = webCamTextureMat.height();

            float imageSizeScale = 1.0f;
            float widthScale     = (float)Screen.width / width;
            float heightScale    = (float)Screen.height / height;

            if (widthScale < heightScale)
            {
                Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
                imageSizeScale = (float)Screen.height / (float)Screen.width;
            }
            else
            {
                Camera.main.orthographicSize = height / 2;
            }


            // set camera parameters.
            double fx;
            double fy;
            double cx;
            double cy;

            string loadDirectoryPath               = Path.Combine(Application.persistentDataPath, "ArUcoCameraCalibrationExample");
            string calibratonDirectoryName         = "camera_parameters" + width + "x" + height;
            string loadCalibratonFileDirectoryPath = Path.Combine(loadDirectoryPath, calibratonDirectoryName);
            string loadPath = Path.Combine(loadCalibratonFileDirectoryPath, calibratonDirectoryName + ".xml");

            if (useStoredCameraParameters && File.Exists(loadPath))
            {
                CameraParameters param;
                XmlSerializer    serializer = new XmlSerializer(typeof(CameraParameters));
                using (var stream = new FileStream(loadPath, FileMode.Open)) {
                    param = (CameraParameters)serializer.Deserialize(stream);
                }

                camMatrix  = param.GetCameraMatrix();
                distCoeffs = new MatOfDouble(param.GetDistortionCoefficients());

                fx = param.camera_matrix [0];
                fy = param.camera_matrix [4];
                cx = param.camera_matrix [2];
                cy = param.camera_matrix [5];

                Debug.Log("Loaded CameraParameters from a stored XML file.");
                Debug.Log("loadPath: " + loadPath);
            }
            else
            {
                int max_d = (int)Mathf.Max(width, height);
                fx = max_d;
                fy = max_d;
                cx = width / 2.0f;
                cy = height / 2.0f;

                camMatrix = new Mat(3, 3, CvType.CV_64FC1);
                camMatrix.put(0, 0, fx);
                camMatrix.put(0, 1, 0);
                camMatrix.put(0, 2, cx);
                camMatrix.put(1, 0, 0);
                camMatrix.put(1, 1, fy);
                camMatrix.put(1, 2, cy);
                camMatrix.put(2, 0, 0);
                camMatrix.put(2, 1, 0);
                camMatrix.put(2, 2, 1.0f);

                distCoeffs = new MatOfDouble(0, 0, 0, 0);

                Debug.Log("Created a dummy CameraParameters.");
            }

            Debug.Log("camMatrix " + camMatrix.dump());
            Debug.Log("distCoeffs " + distCoeffs.dump());


            // compute and log camera matrix characteristics (FOV, focal length, principal point).
            Size   imageSize      = new Size(width * imageSizeScale, height * imageSizeScale);
            double apertureWidth  = 0;
            double apertureHeight = 0;

            double[] fovx           = new double[1];
            double[] fovy           = new double[1];
            double[] focalLength    = new double[1];
            Point    principalPoint = new Point(0, 0);

            double[] aspectratio = new double[1];

            Calib3d.calibrationMatrixValues(camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);

            Debug.Log("imageSize " + imageSize.ToString());
            Debug.Log("apertureWidth " + apertureWidth);
            Debug.Log("apertureHeight " + apertureHeight);
            Debug.Log("fovx " + fovx [0]);
            Debug.Log("fovy " + fovy [0]);
            Debug.Log("focalLength " + focalLength [0]);
            Debug.Log("principalPoint " + principalPoint.ToString());
            Debug.Log("aspectratio " + aspectratio [0]);


            // Convert the OpenCV FOV to the Unity camera FOV: scale the FOV reported by
            // calibrationMatrixValues (which accounts for an off-center principal point)
            // to the symmetric FOV that Unity's camera expects.
            double fovXScale = (2.0 * Mathf.Atan((float)(imageSize.width / (2.0 * fx)))) / (Mathf.Atan2((float)cx, (float)fx) + Mathf.Atan2((float)(imageSize.width - cx), (float)fx));
            double fovYScale = (2.0 * Mathf.Atan((float)(imageSize.height / (2.0 * fy)))) / (Mathf.Atan2((float)cy, (float)fy) + Mathf.Atan2((float)(imageSize.height - cy), (float)fy));

            Debug.Log("fovXScale " + fovXScale);
            Debug.Log("fovYScale " + fovYScale);


            // Adjust Unity Camera FOV https://github.com/opencv/opencv/commit/8ed1945ccd52501f5ab22bdec6aa1f91f1e2cfd4
            if (widthScale < heightScale)
            {
                arCamera.fieldOfView = (float)(fovx [0] * fovXScale);
            }
            else
            {
                arCamera.fieldOfView = (float)(fovy [0] * fovYScale);
            }
            // Display objects near the camera.
            arCamera.nearClipPlane = 0.01f;


            rgbMat          = new Mat(webCamTextureMat.rows(), webCamTextureMat.cols(), CvType.CV_8UC3);
            ids             = new Mat();
            corners         = new List <Mat> ();
            rejectedCorners = new List <Mat> ();
            rvecs           = new Mat();
            tvecs           = new Mat();
            rotMat          = new Mat(3, 3, CvType.CV_64FC1);


            detectorParams = DetectorParameters.create();
            dictionary     = Aruco.getPredefinedDictionary((int)dictionaryId);

            rvec          = new Mat();
            tvec          = new Mat();
            recoveredIdxs = new Mat();

            gridBoard = GridBoard.create(gridBoradMarkersX, gridBoradMarkersY, gridBoradMarkerLength, gridBoradMarkerSeparation, dictionary, gridBoradMarkerFirstMarker);

            charucoCorners = new Mat();
            charucoIds     = new Mat();
            charucoBoard   = CharucoBoard.create(chArUcoBoradSquaresX, chArUcoBoradSquaresY, chArUcoBoradSquareLength, chArUcoBoradMarkerLength, dictionary);

            diamondCorners = new List <Mat> ();
            diamondIds     = new Mat(1, 1, CvType.CV_32SC4);
            diamondIds.put(0, 0, new int[] { diamondId1, diamondId2, diamondId3, diamondId4 });


            // if the webcam is front-facing, flip the Mat horizontally.
            if (webCamTextureToMatHelper.GetWebCamDevice().isFrontFacing)
            {
                webCamTextureToMatHelper.flipHorizontal = true;
            }
        }
Example #6
            public static int InterpolateCornersCharuco(Std.VectorVectorPoint2f markerCorners, Std.VectorInt markerIds, Cv.Core.Mat image, CharucoBoard board,
                                                        out Std.VectorPoint2f charucoCorners, out Std.VectorInt charucoIds)
            {
                Cv.Core.Exception exception = new Cv.Core.Exception();
                System.IntPtr     charucoCornersPtr, charucoIdsPtr;

                int interpolateCorners = au_interpolateCornersCharuco3(markerCorners.cppPtr, markerIds.cppPtr, image.cppPtr, board.cppPtr,
                                                                       out charucoCornersPtr, out charucoIdsPtr, exception.cppPtr);

                charucoCorners = new Std.VectorPoint2f(charucoCornersPtr);
                charucoIds     = new Std.VectorInt(charucoIdsPtr);
                exception.Check();

                return(interpolateCorners);
            }
Example #7
            public static bool EstimatePoseCharucoBoard(Std.VectorPoint2f charucoCorners, Std.VectorInt charucoIds, CharucoBoard board, Cv.Core.Mat cameraMatrix,
                                                        Cv.Core.Mat distCoeffs, out Cv.Core.Vec3d rvec, out Cv.Core.Vec3d tvec)
            {
                Cv.Core.Exception exception = new Cv.Core.Exception();
                System.IntPtr     rvecPtr, tvecPtr;

                bool valid = au_estimatePoseCharucoBoard(charucoCorners.cppPtr, charucoIds.cppPtr, board.cppPtr, cameraMatrix.cppPtr, distCoeffs.cppPtr,
                                                         out rvecPtr, out tvecPtr, exception.cppPtr);

                rvec = new Cv.Core.Vec3d(rvecPtr);
                tvec = new Cv.Core.Vec3d(tvecPtr);

                exception.Check();
                return(valid);
            }
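            // Hypothetical usage sketch: chain the two wrappers above to estimate a board pose.
            // TryEstimateBoardPose is not part of the original wrapper source; its inputs would
            // come from a prior marker-detection and calibration step.
            public static bool TryEstimateBoardPose(Std.VectorVectorPoint2f markerCorners, Std.VectorInt markerIds, Cv.Core.Mat image, CharucoBoard board,
                                                    Cv.Core.Mat cameraMatrix, Cv.Core.Mat distCoeffs, out Cv.Core.Vec3d rvec, out Cv.Core.Vec3d tvec)
            {
                Std.VectorPoint2f charucoCorners;
                Std.VectorInt     charucoIds;
                InterpolateCornersCharuco(markerCorners, markerIds, image, board, out charucoCorners, out charucoIds);

                // Returns true only when enough ChArUco corners were found to yield a valid pose.
                return(EstimatePoseCharucoBoard(charucoCorners, charucoIds, board, cameraMatrix, distCoeffs, out rvec, out tvec));
            }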
Example #8
            public static double CalibrateCameraCharuco(Std.VectorVectorPoint2f charucoCorners, Std.VectorVectorInt charucoIds, CharucoBoard board, Cv.Core.Size imageSize,
                                                        Cv.Core.Mat cameraMatrix, Cv.Core.Mat distCoeffs)
            {
                Cv.Core.Exception exception = new Cv.Core.Exception();

                double reProjectionError = au_calibrateCameraCharuco5(charucoCorners.cppPtr, charucoIds.cppPtr, board.cppPtr, imageSize.cppPtr, cameraMatrix.cppPtr,
                                                                      distCoeffs.cppPtr, exception.cppPtr);

                exception.Check();
                return(reProjectionError);
            }
Example #9
            public static double CalibrateCameraCharuco(Std.VectorVectorPoint2f charucoCorners, Std.VectorVectorInt charucoIds, CharucoBoard board, Cv.Core.Size imageSize,
                                                        Cv.Core.Mat cameraMatrix, Cv.Core.Mat distCoeffs, out Std.VectorMat rvecs, out Std.VectorMat tvecs)
            {
                Cv.Core.Exception exception = new Cv.Core.Exception();
                System.IntPtr     rvecsPtr, tvecsPtr;

                double reProjectionError = au_calibrateCameraCharuco3(charucoCorners.cppPtr, charucoIds.cppPtr, board.cppPtr, imageSize.cppPtr, cameraMatrix.cppPtr,
                                                                      distCoeffs.cppPtr, out rvecsPtr, out tvecsPtr, exception.cppPtr);

                rvecs = new Std.VectorMat(rvecsPtr);
                tvecs = new Std.VectorMat(tvecsPtr);

                exception.Check();
                return(reProjectionError);
            }
Example #10
        private void CreateMarker()
        {
            if (markerImg.cols() != markerSize)
            {
                markerImg.Dispose();
                markerImg = new Mat(markerSize, markerSize, CvType.CV_8UC3);
                texture   = new Texture2D(markerImg.cols(), markerImg.rows(), TextureFormat.RGB24, false);
            }
            else
            {
                markerImg.setTo(Scalar.all(255));
            }

            gameObject.transform.localScale = new Vector3(markerImg.cols(), markerImg.rows(), 1);

            float width  = markerImg.width() / 0.7f;
            float height = markerImg.height() / 0.7f;

            float widthScale  = (float)Screen.width / width;
            float heightScale = (float)Screen.height / height;

            if (widthScale < heightScale)
            {
                Camera.main.orthographicSize       = (width * (float)Screen.height / (float)Screen.width) / 2;
                gameObject.transform.localPosition = new Vector3(0, -height * 0.1f, 0);
            }
            else
            {
                Camera.main.orthographicSize       = height / 2;
                gameObject.transform.localPosition = new Vector3(width * 0.1f, 0, 0);
            }

            // create dictionary.
            Dictionary dictionary = Aruco.getPredefinedDictionary((int)dictionaryId);

            // draw marker.
            switch (markerType)
            {
            default:
            case MarkerType.CanonicalMarker:
                Aruco.drawMarker(dictionary, (int)markerId, markerSize, markerImg, borderBits);
                Debug.Log("draw CanonicalMarker: " + "dictionaryId " + (int)dictionaryId + " markerId " + (int)markerId + " sidePixels " + markerSize + " borderBits " + borderBits);
                break;

            case MarkerType.GridBoard:
                GridBoard gridBoard = GridBoard.create(gridBoradMarkersX, gridBoradMarkersY, gridBoradMarkerLength, gridBoradMarkerSeparation, dictionary, gridBoradMarkerFirstMarker);
                gridBoard.draw(new Size(markerSize, markerSize), markerImg, gridBoradMarginSize, borderBits);
                gridBoard.Dispose();
                Debug.Log("draw GridBoard: " + "markersX " + gridBoradMarkersX + " markersY " + gridBoradMarkersY + " markerLength " + gridBoradMarkerLength +
                          " markerSeparation " + gridBoradMarkerSeparation + "dictionaryId " + (int)dictionaryId + " outSize " + markerSize + " marginSize " + gridBoradMarginSize + " borderBits " + borderBits);
                break;

            case MarkerType.ChArUcoBoard:
                CharucoBoard charucoBoard = CharucoBoard.create(chArUcoBoradMarkersX, chArUcoBoradMarkersY, chArUcoBoradSquareLength, chArUcoBoradMarkerLength, dictionary);
                charucoBoard.draw(new Size(markerSize, markerSize), markerImg, chArUcoBoradMarginSize, borderBits);
                charucoBoard.Dispose();
                Debug.Log("draw ChArUcoBoard: " + "markersX " + chArUcoBoradMarkersX + " markersY " + chArUcoBoradMarkersY + " markerLength " + chArUcoBoradSquareLength +
                          " markerSeparation " + chArUcoBoradMarkerLength + "dictionaryId " + (int)dictionaryId + " outSize " + markerSize + " marginSize " + chArUcoBoradMarginSize + " borderBits " + borderBits);
                break;
            }

            Utils.matToTexture2D(markerImg, texture);
        }
Example #11
 public static int InterpolateCornersCharuco(Std.VectorVectorPoint2f markerCorners, Std.VectorInt markerIds, Cv.Mat image, CharucoBoard board,
                                             out Std.VectorPoint2f charucoCorners, out Std.VectorInt charucoIds)
 {
     Cv.Mat cameraMatrix = new Cv.Mat();
     return(InterpolateCornersCharuco(markerCorners, markerIds, image, board, out charucoCorners, out charucoIds, cameraMatrix));
 }
Example #12
        public static int InterpolateCornersCharuco(Std.VectorVectorPoint2f markerCorners, Std.VectorInt markerIds, Cv.Mat image, CharucoBoard board,
                                                    out Std.VectorPoint2f charucoCorners, out Std.VectorInt charucoIds, Cv.Mat cameraMatrix, Cv.Mat distCoeffs)
        {
            Cv.Exception exception = new Cv.Exception();
            IntPtr       charucoCornersPtr, charucoIdsPtr;

            int interpolateCorners = au_interpolateCornersCharuco(markerCorners.CppPtr, markerIds.CppPtr, image.CppPtr, board.CppPtr,
                                                                  out charucoCornersPtr, out charucoIdsPtr, cameraMatrix.CppPtr, distCoeffs.CppPtr, exception.CppPtr);

            charucoCorners = new Std.VectorPoint2f(charucoCornersPtr);
            charucoIds     = new Std.VectorInt(charucoIdsPtr);
            exception.Check();

            return(interpolateCorners);
        }
Example #13
 public static double CalibrateCameraCharuco(Std.VectorVectorPoint2f charucoCorners, Std.VectorVectorInt charucoIds, CharucoBoard board,
                                             Cv.Size imageSize, Cv.Mat cameraMatrix, Cv.Mat distCoeffs)
 {
     Std.VectorMat rvecs;
     return(CalibrateCameraCharuco(charucoCorners, charucoIds, board, imageSize, cameraMatrix, distCoeffs, out rvecs));
 }
Example #14
 public static double CalibrateCameraCharuco(Std.VectorVectorPoint2f charucoCorners, Std.VectorVectorInt charucoIds, CharucoBoard board,
                                             Cv.Size imageSize, Cv.Mat cameraMatrix, Cv.Mat distCoeffs, out Std.VectorMat rvecs, out Std.VectorMat tvecs, Cv.Calib flags = 0)
 {
     Cv.TermCriteria criteria = new Cv.TermCriteria(Cv.TermCriteria.Type.Count | Cv.TermCriteria.Type.Eps, 30, Cv.EPSILON);
     return(CalibrateCameraCharuco(charucoCorners, charucoIds, board, imageSize, cameraMatrix, distCoeffs, out rvecs, out tvecs, flags, criteria));
 }
Example #15
        public static double CalibrateCameraCharuco(Std.VectorVectorPoint2f charucoCorners, Std.VectorVectorInt charucoIds, CharucoBoard board,
                                                    Cv.Size imageSize, Cv.Mat cameraMatrix, Cv.Mat distCoeffs, out Std.VectorMat rvecs, out Std.VectorMat tvecs, Cv.Calib flags,
                                                    Cv.TermCriteria criteria)
        {
            Cv.Exception exception = new Cv.Exception();
            IntPtr       rvecsPtr, tvecsPtr;

            double reProjectionError = au_calibrateCameraCharuco(charucoCorners.CppPtr, charucoIds.CppPtr, board.CppPtr, imageSize.CppPtr,
                                                                 cameraMatrix.CppPtr, distCoeffs.CppPtr, out rvecsPtr, out tvecsPtr, (int)flags, criteria.CppPtr, exception.CppPtr);

            rvecs = new Std.VectorMat(rvecsPtr);
            tvecs = new Std.VectorMat(tvecsPtr);

            exception.Check();
            return(reProjectionError);
        }