// ArucoCamera methods

/// <summary>
/// Configure the webcam and its properties with the id <see cref="WebcamId"/>. The camera needs to be stopped before configured.
/// </summary>
/// <exception cref="System.ArgumentException">Thrown when no webcam device exists with the id <see cref="WebcamId"/>.</exception>
public override void Configure()
{
  // Never reconfigure a camera that is running or in the middle of starting.
  if (IsStarted || startInitiated)
  {
    return;
  }

  IsConfigured = false;

  // Try to load the webcam. (Fix: the original redundantly reset IsConfigured a second time before throwing — it is already
  // false at this point.)
  WebCamDevice[] webcamDevices = WebCamTexture.devices;
  if (webcamDevices.Length <= WebcamId)
  {
    throw new System.ArgumentException("The webcam with the id '" + WebcamId + "' is not found.", nameof(WebcamId));
  }
  WebCamDevice = webcamDevices[WebcamId];
  WebCamTexture = new WebCamTexture(WebCamDevice.name);
  Name = WebCamDevice.name; // Reuse the cached device instead of re-indexing the array.

  // Try to load the camera parameters, relative to the data folder in the editor or to the persistent data folder in a build.
  if (!string.IsNullOrEmpty(CameraParametersFilePath))
  {
    string fullCameraParametersFilePath = Path.Combine((Application.isEditor) ? Application.dataPath : Application.persistentDataPath,
      CameraParametersFilePath);
    CameraParameters = CameraParameters.LoadFromXmlFile(fullCameraParametersFilePath);
  }

  // Configure the correct orientation of the images.
  flipHorizontallyImages = true;
  flipVerticallyImages = false;

  base.Configure();
}
// ArucoCamera methods

/// <summary>
/// Configure the VR input tracking, the Ovrvision plugin, and auto-start the cameras. The cameras need to be stopped before
/// configured.
/// </summary>
public override void Configure()
{
  // Nothing to do while the cameras are running.
  if (IsStarted)
  {
    return;
  }

  // Reset the configuration state before doing any work.
  IsConfigured = false;

  // Recenter the VR head tracking so the camera rig starts from a known pose.
  UnityEngine.VR.InputTracking.Recenter();

  // Optionally load the camera parameters file, resolved against the data folder in the editor
  // or the persistent data folder in a build.
  bool hasParametersFile = CameraParametersFilePath != null && CameraParametersFilePath.Length > 0;
  if (hasParametersFile)
  {
    string rootPath = Application.isEditor ? Application.dataPath : Application.persistentDataPath;
    string parametersPath = Path.Combine(rootPath, CameraParametersFilePath);
    CameraParameters = CameraParameters.LoadFromXmlFile(parametersPath);
  }

  base.Configure();
}
/// <summary>
/// Calibrate each camera of the <see cref="ArucoObjectDetector.ArucoCamera"/> system using the detected markers added with
/// <see cref="AddCurrentFrameForCalibration()"/>, the <see cref="CameraParameters"/>, the <see cref="CalibrationFlagsController"/> and save
/// the results on a calibration file. Stereo calibrations will be additionally executed on these results for every camera pair in
/// <see cref="StereoCalibrationCameraPairs"/>.
/// </summary>
/// <exception cref="Exception">Thrown when too few frames have been captured for a camera.</exception>
/// <exception cref="ArgumentException">Thrown when a loaded calibration file was made for a different number of cameras.</exception>
public void Calibrate()
{
  // Prepare data: a non-null charucoBoard switches on the ChAruco refinement path below.
  Aruco.CharucoBoard charucoBoard = CalibrationBoard.Board as Aruco.CharucoBoard;

  // Check if there is enough captured frames for calibration.
  for (int cameraId = 0; cameraId < ArucoCamera.CameraNumber; cameraId++)
  {
    if (charucoBoard == null && MarkerIds[cameraId].Size() < 1)
    {
      throw new Exception("Need at least one frame captured for the camera " + (cameraId + 1) + "/" + ArucoCamera.CameraNumber
        + " to calibrate.");
    }
    else if (charucoBoard != null && MarkerIds[cameraId].Size() < 4)
    {
      throw new Exception("Need at least four frames captured for the camera " + (cameraId + 1) + "/" + ArucoCamera.CameraNumber
        + " to calibrate with a ChAruco board.");
    }
  }

  // Load the camera parameters if they exist.
  string cameraParametersFilePath;
  string cameraParametersFolderPath = Path.Combine((Application.isEditor) ? Application.dataPath : Application.persistentDataPath,
    CalibrationFolder);
  if (!Directory.Exists(cameraParametersFolderPath))
  {
    Directory.CreateDirectory(cameraParametersFolderPath);
  }

  if (CalibrationFilename != null && CalibrationFilename.Length > 0)
  {
    // Fix: use Path.Combine instead of raw concatenation, which produced a broken path when CalibrationFolder has no
    // trailing separator.
    cameraParametersFilePath = Path.Combine(cameraParametersFolderPath, CalibrationFilename);
    CameraParameters = CameraParameters.LoadFromXmlFile(cameraParametersFilePath);

    if (CameraParameters.CameraNumber != ArucoCamera.CameraNumber)
    {
      throw new ArgumentException("The loaded camera parameters from the file '" + cameraParametersFilePath
        + "' is for a system with " + CameraParameters.CameraNumber + " camera. But the current calibrating camera has "
        + ArucoCamera.CameraNumber + ". These numbers must be equal.", nameof(CalibrationFilename));
    }
  }
  // Or initialize the camera parameters.
  else
  {
    CameraParameters = new CameraParameters(ArucoCamera.CameraNumber)
    {
      CalibrationFlagsValue = CalibrationFlagsController.CalibrationFlagsValue,
      FixAspectRatioValue = (calibrationFlagsPinholeController) ? calibrationFlagsPinholeController.FixAspectRatioValue : 0
    };
    for (int cameraId = 0; cameraId < ArucoCamera.CameraNumber; cameraId++)
    {
      CameraParameters.ImageHeights[cameraId] = ArucoCamera.ImageTextures[cameraId].height;
      CameraParameters.ImageWidths[cameraId] = ArucoCamera.ImageTextures[cameraId].width;

      // Seed the camera matrix with the fixed aspect ratio when requested, identity otherwise.
      double cameraMatrixAspectRatio = (calibrationFlagsPinholeController && calibrationFlagsPinholeController.FixAspectRatio)
        ? calibrationFlagsPinholeController.FixAspectRatioValue : 1.0;
      CameraParameters.CameraMatrices[cameraId] = new Cv.Mat(3, 3, Cv.Type.CV_64F,
        new double[9] { cameraMatrixAspectRatio, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0 });
      CameraParameters.DistCoeffs[cameraId] = new Cv.Mat();
      CameraParameters.OmnidirXis[cameraId] = new Cv.Mat();
    }
  }

  // Calibrate each camera.
  Std.VectorVectorPoint3f[] objectPoints = new Std.VectorVectorPoint3f[ArucoCamera.CameraNumber];
  Std.VectorVectorPoint2f[] imagePoints = new Std.VectorVectorPoint2f[ArucoCamera.CameraNumber];
  for (int cameraId = 0; cameraId < ArucoCamera.CameraNumber; cameraId++)
  {
    // Get object and image calibration points from detected ids and corners.
    Std.VectorVectorPoint3f boardObjectPoints = new Std.VectorVectorPoint3f();
    Std.VectorVectorPoint2f boardImagePoints = new Std.VectorVectorPoint2f();
    uint frameCount = MarkerCorners[cameraId].Size();
    for (uint frame = 0; frame < frameCount; frame++)
    {
      Std.VectorPoint3f frameObjectPoints;
      Std.VectorPoint2f frameImagePoints;
      Aruco.GetBoardObjectAndImagePoints(CalibrationBoard.Board, MarkerCorners[cameraId].At(frame), MarkerIds[cameraId].At(frame),
        out frameObjectPoints, out frameImagePoints);
      boardObjectPoints.PushBack(frameObjectPoints);
      boardImagePoints.PushBack(frameImagePoints);
    }
    // NOTE(review): the stereo calibration below always uses these board-based points, even when a ChAruco refinement is done —
    // confirm this is intended rather than the refined charuco points.
    objectPoints[cameraId] = boardObjectPoints;
    imagePoints[cameraId] = boardImagePoints;

    // Calibrate the camera with the configured model (pinhole or omnidirectional).
    Cv.Size imageSize = ArucoCamera.Images[cameraId].Size;
    Std.VectorVec3d rvecs, tvecs;
    if (calibrationFlagsPinholeController)
    {
      CameraParameters.ReprojectionErrors[cameraId] = Cv.CalibrateCamera(boardObjectPoints, boardImagePoints, imageSize,
        CameraParameters.CameraMatrices[cameraId], CameraParameters.DistCoeffs[cameraId], out rvecs, out tvecs,
        calibrationFlagsPinholeController.CalibrationFlags);
    }
    else if (calibrationFlagsOmnidirController)
    {
      CameraParameters.ReprojectionErrors[cameraId] = Cv.Omnidir.Calibrate(boardObjectPoints, boardImagePoints, imageSize,
        CameraParameters.CameraMatrices[cameraId], CameraParameters.OmnidirXis[cameraId], CameraParameters.DistCoeffs[cameraId],
        out rvecs, out tvecs, calibrationFlagsOmnidirController.CalibrationFlags);
    }
    else
    {
      rvecs = new Std.VectorVec3d();
      tvecs = new Std.VectorVec3d();
    }

    // If the used board is a charuco board, refine the calibration.
    if (charucoBoard != null)
    {
      // Prepare data to refine the calibration.
      Std.VectorVectorPoint3f charucoObjectPoints = new Std.VectorVectorPoint3f();
      Std.VectorVectorPoint2f charucoImagePoints = new Std.VectorVectorPoint2f();
      for (uint frame = 0; frame < frameCount; frame++)
      {
        // Interpolate charuco corners using the camera parameters from the first calibration pass.
        Std.VectorPoint2f charucoCorners;
        Std.VectorInt charucoIds;
        Aruco.InterpolateCornersCharuco(MarkerCorners[cameraId].At(frame), MarkerIds[cameraId].At(frame),
          CameraImages[cameraId].At(frame), charucoBoard, out charucoCorners, out charucoIds);
        charucoImagePoints.PushBack(charucoCorners);

        // Join the object points corresponding to the detected corners.
        charucoObjectPoints.PushBack(new Std.VectorPoint3f());
        uint markerCount = charucoIds.Size();
        for (uint marker = 0; marker < markerCount; marker++)
        {
          uint pointId = (uint)charucoIds.At(marker);
          Cv.Point3f objectPoint = charucoBoard.ChessboardCorners.At(pointId);
          charucoObjectPoints.At(frame).PushBack(objectPoint);
        }
      }

      // Refine the calibration with the interpolated charuco corners.
      // Fix: the omnidir branch previously reused boardObjectPoints/boardImagePoints (copy-paste from the first pass), making the
      // ChAruco refinement a no-op for omnidirectional cameras; both branches now use the charuco points like the pinhole branch.
      if (calibrationFlagsPinholeController)
      {
        CameraParameters.ReprojectionErrors[cameraId] = Cv.CalibrateCamera(charucoObjectPoints, charucoImagePoints, imageSize,
          CameraParameters.CameraMatrices[cameraId], CameraParameters.DistCoeffs[cameraId], out rvecs, out tvecs,
          calibrationFlagsPinholeController.CalibrationFlags);
      }
      else if (calibrationFlagsOmnidirController)
      {
        CameraParameters.ReprojectionErrors[cameraId] = Cv.Omnidir.Calibrate(charucoObjectPoints, charucoImagePoints, imageSize,
          CameraParameters.CameraMatrices[cameraId], CameraParameters.OmnidirXis[cameraId], CameraParameters.DistCoeffs[cameraId],
          out rvecs, out tvecs, calibrationFlagsOmnidirController.CalibrationFlags);
      }
    }

    // Save calibration extrinsic parameters.
    Rvecs[cameraId] = rvecs;
    Tvecs[cameraId] = tvecs;
  }

  // If required, apply a stereo calibration and save the results in the camera parameters.
  CameraParameters.StereoCameraParametersList = new StereoCameraParameters[StereoCalibrationCameraPairs.Length];
  for (int i = 0; i < StereoCalibrationCameraPairs.Length; i++)
  {
    CameraParameters.StereoCameraParametersList[i] = StereoCalibrationCameraPairs[i].Calibrate(ArucoCamera, CameraParameters,
      objectPoints, imagePoints);
  }

  IsCalibrated = true;

  // Save the camera parameters, using a timestamped default filename when none is configured.
  string outputFilename = (CalibrationFilename != null && CalibrationFilename.Length > 0)
    ? CalibrationFilename
    : ArucoCamera.Name + " - " + DateTime.Now.ToString("yyyy-MM-dd_HH-mm-ss") + ".xml";
  cameraParametersFilePath = Path.Combine(cameraParametersFolderPath, outputFilename);
  CameraParameters.SaveToXmlFile(cameraParametersFilePath);
}